[AArch64][SVE 25/32] Add support for SVE addressing modes

This patch adds most of the new SVE addressing modes and associated
operands.  A follow-on patch adds MUL VL, since handling it separately
makes the changes easier to read.

The patch also introduces a new "operand-dependent data" field to the
operand flags, based closely on the existing one for opcode flags.
For SVE this new field needs only 2 bits, but it could be widened
in future if necessary.
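
To make the new flag layout concrete, here is a small standalone C sketch (not part of the patch); the OPD_F_OD_MASK, OPD_F_OD_LSB and OPD_F_NO_ZR values and the shift-and-mask logic mirror the definitions this patch adds to opcodes/aarch64-opc.h, while od_value and main are just illustrative names:

#include <stdio.h>

/* Values as added to opcodes/aarch64-opc.h by this patch.  */
#define OPD_F_OD_MASK 0x00000060	/* Operand-dependent data (2 bits).  */
#define OPD_F_OD_LSB  5
#define OPD_F_NO_ZR   0x00000080	/* ZR index not allowed.  */

/* Same shift-and-mask as get_operand_specific_data.  */
static unsigned int
od_value (unsigned int flags)
{
  return (flags & OPD_F_OD_MASK) >> OPD_F_OD_LSB;
}

int
main (void)
{
  /* SVE_ADDR_RX_LSL2 is declared with (2 << OPD_F_OD_LSB) | OPD_F_NO_ZR:
     a scalar-plus-scalar address with LSL #2 whose index may not be XZR.  */
  unsigned int flags = (2u << OPD_F_OD_LSB) | OPD_F_NO_ZR;
  printf ("operand-dependent value: %u\n", od_value (flags));
  printf ("XZR index allowed: %s\n", (flags & OPD_F_NO_ZR) ? "no" : "yes");
  return 0;
}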

include/
	* opcode/aarch64.h (AARCH64_OPND_SVE_ADDR_RI_U6): New aarch64_opnd.
	(AARCH64_OPND_SVE_ADDR_RI_U6x2, AARCH64_OPND_SVE_ADDR_RI_U6x4)
	(AARCH64_OPND_SVE_ADDR_RI_U6x8, AARCH64_OPND_SVE_ADDR_RR)
	(AARCH64_OPND_SVE_ADDR_RR_LSL1, AARCH64_OPND_SVE_ADDR_RR_LSL2)
	(AARCH64_OPND_SVE_ADDR_RR_LSL3, AARCH64_OPND_SVE_ADDR_RX)
	(AARCH64_OPND_SVE_ADDR_RX_LSL1, AARCH64_OPND_SVE_ADDR_RX_LSL2)
	(AARCH64_OPND_SVE_ADDR_RX_LSL3, AARCH64_OPND_SVE_ADDR_RZ)
	(AARCH64_OPND_SVE_ADDR_RZ_LSL1, AARCH64_OPND_SVE_ADDR_RZ_LSL2)
	(AARCH64_OPND_SVE_ADDR_RZ_LSL3, AARCH64_OPND_SVE_ADDR_RZ_XTW_14)
	(AARCH64_OPND_SVE_ADDR_RZ_XTW_22, AARCH64_OPND_SVE_ADDR_RZ_XTW1_14)
	(AARCH64_OPND_SVE_ADDR_RZ_XTW1_22, AARCH64_OPND_SVE_ADDR_RZ_XTW2_14)
	(AARCH64_OPND_SVE_ADDR_RZ_XTW2_22, AARCH64_OPND_SVE_ADDR_RZ_XTW3_14)
	(AARCH64_OPND_SVE_ADDR_RZ_XTW3_22, AARCH64_OPND_SVE_ADDR_ZI_U5)
	(AARCH64_OPND_SVE_ADDR_ZI_U5x2, AARCH64_OPND_SVE_ADDR_ZI_U5x4)
	(AARCH64_OPND_SVE_ADDR_ZI_U5x8, AARCH64_OPND_SVE_ADDR_ZZ_LSL)
	(AARCH64_OPND_SVE_ADDR_ZZ_SXTW, AARCH64_OPND_SVE_ADDR_ZZ_UXTW):
	Likewise.

opcodes/
	* aarch64-tbl.h (AARCH64_OPERANDS): Add entries for the new SVE
	address operands.
	* aarch64-opc.h (FLD_SVE_imm6, FLD_SVE_msz, FLD_SVE_xs_14)
	(FLD_SVE_xs_22): New aarch64_field_kinds.
	(OPD_F_OD_MASK, OPD_F_OD_LSB, OPD_F_NO_ZR): New flags.
	(get_operand_specific_data): New function.
	* aarch64-opc.c (fields): Add entries for FLD_SVE_imm6, FLD_SVE_msz,
	FLD_SVE_xs_14 and FLD_SVE_xs_22.
	(operand_general_constraint_met_p): Handle the new SVE address
	operands.
	(sve_reg): New array.
	(get_addr_sve_reg_name): New function.
	(aarch64_print_operand): Handle the new SVE address operands.
	* aarch64-opc-2.c: Regenerate.
	* aarch64-asm.h (ins_sve_addr_ri_u6, ins_sve_addr_rr_lsl)
	(ins_sve_addr_rz_xtw, ins_sve_addr_zi_u5, ins_sve_addr_zz_lsl)
	(ins_sve_addr_zz_sxtw, ins_sve_addr_zz_uxtw): New inserters.
	* aarch64-asm.c (aarch64_ins_sve_addr_ri_u6): New function.
	(aarch64_ins_sve_addr_rr_lsl): Likewise.
	(aarch64_ins_sve_addr_rz_xtw): Likewise.
	(aarch64_ins_sve_addr_zi_u5): Likewise.
	(aarch64_ins_sve_addr_zz): Likewise.
	(aarch64_ins_sve_addr_zz_lsl): Likewise.
	(aarch64_ins_sve_addr_zz_sxtw): Likewise.
	(aarch64_ins_sve_addr_zz_uxtw): Likewise.
	* aarch64-asm-2.c: Regenerate.
	* aarch64-dis.h (ext_sve_addr_ri_u6, ext_sve_addr_rr_lsl)
	(ext_sve_addr_rz_xtw, ext_sve_addr_zi_u5, ext_sve_addr_zz_lsl)
	(ext_sve_addr_zz_sxtw, ext_sve_addr_zz_uxtw): New extractors.
	* aarch64-dis.c (aarch64_ext_sve_add_reg_imm): New function.
	(aarch64_ext_sve_addr_ri_u6): Likewise.
	(aarch64_ext_sve_addr_rr_lsl): Likewise.
	(aarch64_ext_sve_addr_rz_xtw): Likewise.
	(aarch64_ext_sve_addr_zi_u5): Likewise.
	(aarch64_ext_sve_addr_zz): Likewise.
	(aarch64_ext_sve_addr_zz_lsl): Likewise.
	(aarch64_ext_sve_addr_zz_sxtw): Likewise.
	(aarch64_ext_sve_addr_zz_uxtw): Likewise.
	* aarch64-dis-2.c: Regenerate.

gas/
	* config/tc-aarch64.c (REG_TYPE_SVE_BASE, REG_TYPE_SVE_OFFSET): New
	register types.
	(get_reg_expected_msg): Handle them.
	(aarch64_addr_reg_parse): New function, split out from
	aarch64_reg_parse_32_64.  Handle Z registers too.
	(aarch64_reg_parse_32_64): Call it.
	(parse_address_main): Add base_qualifier, offset_qualifier,
	base_type and offset_type parameters.  Handle SVE base and offset
	registers.
	(parse_address): Update call to parse_address_main.
	(parse_sve_address): New function.
	(parse_operands): Parse the new SVE address operands.
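
As a quick worked example of how that operand-dependent value drives the scaled immediate forms (SVE_ADDR_RI_U6x2/x4/x8 and SVE_ADDR_ZI_U5x2/x4/x8), the sketch below mirrors the divide-on-encode / multiply-on-decode done by aarch64_ins_sve_addr_ri_u6 and aarch64_ext_sve_addr_reg_imm; encode_u6_offset and decode_u6_offset are simplified stand-ins for the real insert_field/extract_field code:

#include <assert.h>
#include <stdint.h>

/* Sketch only: the imm6 field stores offset / (1 << od); decoding
   multiplies the field value back up by the same factor.  */
static unsigned int
encode_u6_offset (int64_t offset, unsigned int od)
{
  int64_t factor = 1 << od;	/* od is the 2-bit operand-dependent value.  */
  assert (offset % factor == 0);	/* value_aligned_p check.  */
  assert (offset >= 0 && offset / factor <= 63);	/* 6-bit range check.  */
  return (unsigned int) (offset / factor);
}

static int64_t
decode_u6_offset (unsigned int field, unsigned int od)
{
  return (int64_t) field << od;
}

int
main (void)
{
  /* SVE_ADDR_RI_U6x4 has od == 2, so a written offset of #28 is
     encoded as imm6 == 7 and decodes back to 28.  */
  unsigned int imm6 = encode_u6_offset (28, 2);
  assert (imm6 == 7);
  assert (decode_u6_offset (imm6, 2) == 28);
  return 0;
}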
Author: Richard Sandiford <richard.sandiford@arm.com>
Date:   2016-09-21 16:55:49 +01:00
Commit: 4df068de52 (parent 2442d8466e)
15 changed files with 1035 additions and 64 deletions

gas/ChangeLog

@@ -1,3 +1,18 @@
2016-09-21 Richard Sandiford <richard.sandiford@arm.com>
* config/tc-aarch64.c (REG_TYPE_SVE_BASE, REG_TYPE_SVE_OFFSET): New
register types.
(get_reg_expected_msg): Handle them.
(aarch64_addr_reg_parse): New function, split out from
aarch64_reg_parse_32_64. Handle Z registers too.
(aarch64_reg_parse_32_64): Call it.
(parse_address_main): Add base_qualifier, offset_qualifier,
base_type and offset_type parameters. Handle SVE base and offset
registers.
(parse_address): Update call to parse_address_main.
(parse_sve_address): New function.
(parse_operands): Parse the new SVE address operands.
2016-09-21 Richard Sandiford <richard.sandiford@arm.com>
* config/tc-aarch64.c (SHIFTED_MUL): New parse_shift_mode.

gas/config/tc-aarch64.c

@@ -272,9 +272,16 @@ struct reloc_entry
BASIC_REG_TYPE(PN) /* p[0-15] */ \
/* Typecheck: any 64-bit int reg (inc SP exc XZR). */ \
MULTI_REG_TYPE(R64_SP, REG_TYPE(R_64) | REG_TYPE(SP_64)) \
/* Typecheck: same, plus SVE registers. */ \
MULTI_REG_TYPE(SVE_BASE, REG_TYPE(R_64) | REG_TYPE(SP_64) \
| REG_TYPE(ZN)) \
/* Typecheck: x[0-30], w[0-30] or [xw]zr. */ \
MULTI_REG_TYPE(R_Z, REG_TYPE(R_32) | REG_TYPE(R_64) \
| REG_TYPE(Z_32) | REG_TYPE(Z_64)) \
/* Typecheck: same, plus SVE registers. */ \
MULTI_REG_TYPE(SVE_OFFSET, REG_TYPE(R_32) | REG_TYPE(R_64) \
| REG_TYPE(Z_32) | REG_TYPE(Z_64) \
| REG_TYPE(ZN)) \
/* Typecheck: x[0-30], w[0-30] or {w}sp. */ \
MULTI_REG_TYPE(R_SP, REG_TYPE(R_32) | REG_TYPE(R_64) \
| REG_TYPE(SP_32) | REG_TYPE(SP_64)) \
@@ -358,9 +365,15 @@ get_reg_expected_msg (aarch64_reg_type reg_type)
case REG_TYPE_R64_SP:
msg = N_("64-bit integer or SP register expected");
break;
case REG_TYPE_SVE_BASE:
msg = N_("base register expected");
break;
case REG_TYPE_R_Z:
msg = N_("integer or zero register expected");
break;
case REG_TYPE_SVE_OFFSET:
msg = N_("offset register expected");
break;
case REG_TYPE_R_SP:
msg = N_("integer or SP register expected");
break;
@@ -697,14 +710,16 @@ aarch64_check_reg_type (const reg_entry *reg, aarch64_reg_type type)
return (reg_type_masks[type] & (1 << reg->type)) != 0;
}
/* Try to parse a base or offset register. Return the register entry
on success, setting *QUALIFIER to the register qualifier. Return null
otherwise.
/* Try to parse a base or offset register. Allow SVE base and offset
registers if REG_TYPE includes SVE registers. Return the register
entry on success, setting *QUALIFIER to the register qualifier.
Return null otherwise.
Note that this function does not issue any diagnostics. */
static const reg_entry *
aarch64_reg_parse_32_64 (char **ccp, aarch64_opnd_qualifier_t *qualifier)
aarch64_addr_reg_parse (char **ccp, aarch64_reg_type reg_type,
aarch64_opnd_qualifier_t *qualifier)
{
char *str = *ccp;
const reg_entry *reg = parse_reg (&str);
@@ -726,6 +741,24 @@ aarch64_reg_parse_32_64 (char **ccp, aarch64_opnd_qualifier_t *qualifier)
*qualifier = AARCH64_OPND_QLF_X;
break;
case REG_TYPE_ZN:
if ((reg_type_masks[reg_type] & (1 << REG_TYPE_ZN)) == 0
|| str[0] != '.')
return NULL;
switch (TOLOWER (str[1]))
{
case 's':
*qualifier = AARCH64_OPND_QLF_S_S;
break;
case 'd':
*qualifier = AARCH64_OPND_QLF_S_D;
break;
default:
return NULL;
}
str += 2;
break;
default:
return NULL;
}
@@ -735,6 +768,18 @@ aarch64_reg_parse_32_64 (char **ccp, aarch64_opnd_qualifier_t *qualifier)
return reg;
}
/* Try to parse a base or offset register. Return the register entry
on success, setting *QUALIFIER to the register qualifier. Return null
otherwise.
Note that this function does not issue any diagnostics. */
static const reg_entry *
aarch64_reg_parse_32_64 (char **ccp, aarch64_opnd_qualifier_t *qualifier)
{
return aarch64_addr_reg_parse (ccp, REG_TYPE_R_Z_SP, qualifier);
}
/* Parse the qualifier of a vector register or vector element of type
REG_TYPE. Fill in *PARSED_TYPE and return TRUE if the parsing
succeeds; otherwise return FALSE.
@@ -3209,8 +3254,8 @@ parse_shifter_operand_reloc (char **str, aarch64_opnd_info *operand,
The A64 instruction set has the following addressing modes:
Offset
[base] // in SIMD ld/st structure
[base{,#0}] // in ld/st exclusive
[base] // in SIMD ld/st structure
[base{,#0}] // in ld/st exclusive
[base{,#imm}]
[base,Xm{,LSL #imm}]
[base,Xm,SXTX {#imm}]
@@ -3219,10 +3264,18 @@ parse_shifter_operand_reloc (char **str, aarch64_opnd_info *operand,
[base,#imm]!
Post-indexed
[base],#imm
[base],Xm // in SIMD ld/st structure
[base],Xm // in SIMD ld/st structure
PC-relative (literal)
label
=immediate
SVE:
[base,Zm.D{,LSL #imm}]
[base,Zm.S,(S|U)XTW {#imm}]
[base,Zm.D,(S|U)XTW {#imm}] // ignores top 32 bits of Zm.D elements
[Zn.S,#imm]
[Zn.D,#imm]
[Zn.S,Zm.S{,LSL #imm}] // in ADR
[Zn.D,Zm.D{,LSL #imm}] // in ADR
[Zn.D,Zm.D,(S|U)XTW {#imm}] // in ADR
(As a convenience, the notation "=immediate" is permitted in conjunction
with the pc-relative literal load instructions to automatically place an
@@ -3249,19 +3302,27 @@ parse_shifter_operand_reloc (char **str, aarch64_opnd_info *operand,
.pcrel=1; .preind=1; .postind=0; .writeback=0
The shift/extension information, if any, will be stored in .shifter.
The base and offset qualifiers will be stored in *BASE_QUALIFIER and
*OFFSET_QUALIFIER respectively, with NIL being used if there's no
corresponding register.
It is the caller's responsibility to check for addressing modes not
BASE_TYPE says which types of base register should be accepted and
OFFSET_TYPE says the same for offset registers. In all other respects,
it is the caller's responsibility to check for addressing modes not
supported by the instruction, and to set inst.reloc.type. */
static bfd_boolean
parse_address_main (char **str, aarch64_opnd_info *operand)
parse_address_main (char **str, aarch64_opnd_info *operand,
aarch64_opnd_qualifier_t *base_qualifier,
aarch64_opnd_qualifier_t *offset_qualifier,
aarch64_reg_type base_type, aarch64_reg_type offset_type)
{
char *p = *str;
const reg_entry *reg;
aarch64_opnd_qualifier_t base_qualifier;
aarch64_opnd_qualifier_t offset_qualifier;
expressionS *exp = &inst.reloc.exp;
*base_qualifier = AARCH64_OPND_QLF_NIL;
*offset_qualifier = AARCH64_OPND_QLF_NIL;
if (! skip_past_char (&p, '['))
{
/* =immediate or label. */
@@ -3336,10 +3397,10 @@ parse_address_main (char **str, aarch64_opnd_info *operand)
/* [ */
reg = aarch64_reg_parse_32_64 (&p, &base_qualifier);
if (!reg || !aarch64_check_reg_type (reg, REG_TYPE_R64_SP))
reg = aarch64_addr_reg_parse (&p, base_type, base_qualifier);
if (!reg || !aarch64_check_reg_type (reg, base_type))
{
set_syntax_error (_(get_reg_expected_msg (REG_TYPE_R64_SP)));
set_syntax_error (_(get_reg_expected_msg (base_type)));
return FALSE;
}
operand->addr.base_regno = reg->number;
@@ -3350,12 +3411,12 @@ parse_address_main (char **str, aarch64_opnd_info *operand)
/* [Xn, */
operand->addr.preind = 1;
reg = aarch64_reg_parse_32_64 (&p, &offset_qualifier);
reg = aarch64_addr_reg_parse (&p, offset_type, offset_qualifier);
if (reg)
{
if (!aarch64_check_reg_type (reg, REG_TYPE_R_Z))
if (!aarch64_check_reg_type (reg, offset_type))
{
set_syntax_error (_(get_reg_expected_msg (REG_TYPE_R_Z)));
set_syntax_error (_(get_reg_expected_msg (offset_type)));
return FALSE;
}
@@ -3379,13 +3440,19 @@ parse_address_main (char **str, aarch64_opnd_info *operand)
|| operand->shifter.kind == AARCH64_MOD_LSL
|| operand->shifter.kind == AARCH64_MOD_SXTX)
{
if (offset_qualifier == AARCH64_OPND_QLF_W)
if (*offset_qualifier == AARCH64_OPND_QLF_W)
{
set_syntax_error (_("invalid use of 32-bit register offset"));
return FALSE;
}
if (aarch64_get_qualifier_esize (*base_qualifier)
!= aarch64_get_qualifier_esize (*offset_qualifier))
{
set_syntax_error (_("offset has different size from base"));
return FALSE;
}
}
else if (offset_qualifier == AARCH64_OPND_QLF_X)
else if (*offset_qualifier == AARCH64_OPND_QLF_X)
{
set_syntax_error (_("invalid use of 64-bit register offset"));
return FALSE;
@@ -3468,7 +3535,7 @@ parse_address_main (char **str, aarch64_opnd_info *operand)
return FALSE;
}
reg = aarch64_reg_parse_32_64 (&p, &offset_qualifier);
reg = aarch64_reg_parse_32_64 (&p, offset_qualifier);
if (reg)
{
/* [Xn],Xm */
@@ -3513,7 +3580,21 @@ parse_address_main (char **str, aarch64_opnd_info *operand)
static bfd_boolean
parse_address (char **str, aarch64_opnd_info *operand)
{
return parse_address_main (str, operand);
aarch64_opnd_qualifier_t base_qualifier, offset_qualifier;
return parse_address_main (str, operand, &base_qualifier, &offset_qualifier,
REG_TYPE_R64_SP, REG_TYPE_R_Z);
}
/* Parse an address in which SVE vector registers are allowed.
The arguments have the same meaning as for parse_address_main.
Return TRUE on success. */
static bfd_boolean
parse_sve_address (char **str, aarch64_opnd_info *operand,
aarch64_opnd_qualifier_t *base_qualifier,
aarch64_opnd_qualifier_t *offset_qualifier)
{
return parse_address_main (str, operand, base_qualifier, offset_qualifier,
REG_TYPE_SVE_BASE, REG_TYPE_SVE_OFFSET);
}
/* Parse an operand for a MOVZ, MOVN or MOVK instruction.
@@ -5123,7 +5204,7 @@ parse_operands (char *str, const aarch64_opcode *opcode)
int comma_skipped_p = 0;
aarch64_reg_type rtype;
struct vector_type_el vectype;
aarch64_opnd_qualifier_t qualifier;
aarch64_opnd_qualifier_t qualifier, base_qualifier, offset_qualifier;
aarch64_opnd_info *info = &inst.base.operands[i];
aarch64_reg_type reg_type;
@@ -5757,6 +5838,7 @@ parse_operands (char *str, const aarch64_opcode *opcode)
case AARCH64_OPND_ADDR_REGOFF:
/* [<Xn|SP>, <R><m>{, <extend> {<amount>}}] */
po_misc_or_fail (parse_address (&str, info));
regoff_addr:
if (info->addr.pcrel || !info->addr.offset.is_reg
|| !info->addr.preind || info->addr.postind
|| info->addr.writeback)
@@ -5856,6 +5938,123 @@ parse_operands (char *str, const aarch64_opcode *opcode)
/* No qualifier. */
break;
case AARCH64_OPND_SVE_ADDR_RI_U6:
case AARCH64_OPND_SVE_ADDR_RI_U6x2:
case AARCH64_OPND_SVE_ADDR_RI_U6x4:
case AARCH64_OPND_SVE_ADDR_RI_U6x8:
/* [X<n>{, #imm}]
but recognizing SVE registers. */
po_misc_or_fail (parse_sve_address (&str, info, &base_qualifier,
&offset_qualifier));
if (base_qualifier != AARCH64_OPND_QLF_X)
{
set_syntax_error (_("invalid addressing mode"));
goto failure;
}
sve_regimm:
if (info->addr.pcrel || info->addr.offset.is_reg
|| !info->addr.preind || info->addr.writeback)
{
set_syntax_error (_("invalid addressing mode"));
goto failure;
}
if (inst.reloc.type != BFD_RELOC_UNUSED
|| inst.reloc.exp.X_op != O_constant)
{
/* Make sure this has priority over
"invalid addressing mode". */
set_fatal_syntax_error (_("constant offset required"));
goto failure;
}
info->addr.offset.imm = inst.reloc.exp.X_add_number;
break;
case AARCH64_OPND_SVE_ADDR_RR:
case AARCH64_OPND_SVE_ADDR_RR_LSL1:
case AARCH64_OPND_SVE_ADDR_RR_LSL2:
case AARCH64_OPND_SVE_ADDR_RR_LSL3:
case AARCH64_OPND_SVE_ADDR_RX:
case AARCH64_OPND_SVE_ADDR_RX_LSL1:
case AARCH64_OPND_SVE_ADDR_RX_LSL2:
case AARCH64_OPND_SVE_ADDR_RX_LSL3:
/* [<Xn|SP>, <R><m>{, lsl #<amount>}]
but recognizing SVE registers. */
po_misc_or_fail (parse_sve_address (&str, info, &base_qualifier,
&offset_qualifier));
if (base_qualifier != AARCH64_OPND_QLF_X
|| offset_qualifier != AARCH64_OPND_QLF_X)
{
set_syntax_error (_("invalid addressing mode"));
goto failure;
}
goto regoff_addr;
case AARCH64_OPND_SVE_ADDR_RZ:
case AARCH64_OPND_SVE_ADDR_RZ_LSL1:
case AARCH64_OPND_SVE_ADDR_RZ_LSL2:
case AARCH64_OPND_SVE_ADDR_RZ_LSL3:
case AARCH64_OPND_SVE_ADDR_RZ_XTW_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW1_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW1_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW2_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW2_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW3_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW3_22:
/* [<Xn|SP>, Z<m>.D{, LSL #<amount>}]
[<Xn|SP>, Z<m>.<T>, <extend> {#<amount>}] */
po_misc_or_fail (parse_sve_address (&str, info, &base_qualifier,
&offset_qualifier));
if (base_qualifier != AARCH64_OPND_QLF_X
|| (offset_qualifier != AARCH64_OPND_QLF_S_S
&& offset_qualifier != AARCH64_OPND_QLF_S_D))
{
set_syntax_error (_("invalid addressing mode"));
goto failure;
}
info->qualifier = offset_qualifier;
goto regoff_addr;
case AARCH64_OPND_SVE_ADDR_ZI_U5:
case AARCH64_OPND_SVE_ADDR_ZI_U5x2:
case AARCH64_OPND_SVE_ADDR_ZI_U5x4:
case AARCH64_OPND_SVE_ADDR_ZI_U5x8:
/* [Z<n>.<T>{, #imm}] */
po_misc_or_fail (parse_sve_address (&str, info, &base_qualifier,
&offset_qualifier));
if (base_qualifier != AARCH64_OPND_QLF_S_S
&& base_qualifier != AARCH64_OPND_QLF_S_D)
{
set_syntax_error (_("invalid addressing mode"));
goto failure;
}
info->qualifier = base_qualifier;
goto sve_regimm;
case AARCH64_OPND_SVE_ADDR_ZZ_LSL:
case AARCH64_OPND_SVE_ADDR_ZZ_SXTW:
case AARCH64_OPND_SVE_ADDR_ZZ_UXTW:
/* [Z<n>.<T>, Z<m>.<T>{, LSL #<amount>}]
[Z<n>.D, Z<m>.D, <extend> {#<amount>}]
We don't reject:
[Z<n>.S, Z<m>.S, <extend> {#<amount>}]
here since we get better error messages by leaving it to
the qualifier checking routines. */
po_misc_or_fail (parse_sve_address (&str, info, &base_qualifier,
&offset_qualifier));
if ((base_qualifier != AARCH64_OPND_QLF_S_S
&& base_qualifier != AARCH64_OPND_QLF_S_D)
|| offset_qualifier != base_qualifier)
{
set_syntax_error (_("invalid addressing mode"));
goto failure;
}
info->qualifier = base_qualifier;
goto regoff_addr;
case AARCH64_OPND_SYSREG:
if ((val = parse_sys_reg (&str, aarch64_sys_regs_hsh, 1, 0))
== PARSE_FAIL)

include/ChangeLog

@@ -1,3 +1,23 @@
2016-09-21 Richard Sandiford <richard.sandiford@arm.com>
* opcode/aarch64.h (AARCH64_OPND_SVE_ADDR_RI_U6): New aarch64_opnd.
(AARCH64_OPND_SVE_ADDR_RI_U6x2, AARCH64_OPND_SVE_ADDR_RI_U6x4)
(AARCH64_OPND_SVE_ADDR_RI_U6x8, AARCH64_OPND_SVE_ADDR_RR)
(AARCH64_OPND_SVE_ADDR_RR_LSL1, AARCH64_OPND_SVE_ADDR_RR_LSL2)
(AARCH64_OPND_SVE_ADDR_RR_LSL3, AARCH64_OPND_SVE_ADDR_RX)
(AARCH64_OPND_SVE_ADDR_RX_LSL1, AARCH64_OPND_SVE_ADDR_RX_LSL2)
(AARCH64_OPND_SVE_ADDR_RX_LSL3, AARCH64_OPND_SVE_ADDR_RZ)
(AARCH64_OPND_SVE_ADDR_RZ_LSL1, AARCH64_OPND_SVE_ADDR_RZ_LSL2)
(AARCH64_OPND_SVE_ADDR_RZ_LSL3, AARCH64_OPND_SVE_ADDR_RZ_XTW_14)
(AARCH64_OPND_SVE_ADDR_RZ_XTW_22, AARCH64_OPND_SVE_ADDR_RZ_XTW1_14)
(AARCH64_OPND_SVE_ADDR_RZ_XTW1_22, AARCH64_OPND_SVE_ADDR_RZ_XTW2_14)
(AARCH64_OPND_SVE_ADDR_RZ_XTW2_22, AARCH64_OPND_SVE_ADDR_RZ_XTW3_14)
(AARCH64_OPND_SVE_ADDR_RZ_XTW3_22, AARCH64_OPND_SVE_ADDR_ZI_U5)
(AARCH64_OPND_SVE_ADDR_ZI_U5x2, AARCH64_OPND_SVE_ADDR_ZI_U5x4)
(AARCH64_OPND_SVE_ADDR_ZI_U5x8, AARCH64_OPND_SVE_ADDR_ZZ_LSL)
(AARCH64_OPND_SVE_ADDR_ZZ_SXTW, AARCH64_OPND_SVE_ADDR_ZZ_UXTW):
Likewise.
2016-09-21 Richard Sandiford <richard.sandiford@arm.com>
* opcode/aarch64.h (AARCH64_OPND_SVE_PATTERN_SCALED): New

include/opcode/aarch64.h

@@ -244,6 +244,45 @@ enum aarch64_opnd
AARCH64_OPND_PRFOP, /* Prefetch operation. */
AARCH64_OPND_BARRIER_PSB, /* Barrier operand for PSB. */
AARCH64_OPND_SVE_ADDR_RI_U6, /* SVE [<Xn|SP>, #<uimm6>]. */
AARCH64_OPND_SVE_ADDR_RI_U6x2, /* SVE [<Xn|SP>, #<uimm6>*2]. */
AARCH64_OPND_SVE_ADDR_RI_U6x4, /* SVE [<Xn|SP>, #<uimm6>*4]. */
AARCH64_OPND_SVE_ADDR_RI_U6x8, /* SVE [<Xn|SP>, #<uimm6>*8]. */
AARCH64_OPND_SVE_ADDR_RR, /* SVE [<Xn|SP>, <Xm|XZR>]. */
AARCH64_OPND_SVE_ADDR_RR_LSL1, /* SVE [<Xn|SP>, <Xm|XZR>, LSL #1]. */
AARCH64_OPND_SVE_ADDR_RR_LSL2, /* SVE [<Xn|SP>, <Xm|XZR>, LSL #2]. */
AARCH64_OPND_SVE_ADDR_RR_LSL3, /* SVE [<Xn|SP>, <Xm|XZR>, LSL #3]. */
AARCH64_OPND_SVE_ADDR_RX, /* SVE [<Xn|SP>, <Xm>]. */
AARCH64_OPND_SVE_ADDR_RX_LSL1, /* SVE [<Xn|SP>, <Xm>, LSL #1]. */
AARCH64_OPND_SVE_ADDR_RX_LSL2, /* SVE [<Xn|SP>, <Xm>, LSL #2]. */
AARCH64_OPND_SVE_ADDR_RX_LSL3, /* SVE [<Xn|SP>, <Xm>, LSL #3]. */
AARCH64_OPND_SVE_ADDR_RZ, /* SVE [<Xn|SP>, Zm.D]. */
AARCH64_OPND_SVE_ADDR_RZ_LSL1, /* SVE [<Xn|SP>, Zm.D, LSL #1]. */
AARCH64_OPND_SVE_ADDR_RZ_LSL2, /* SVE [<Xn|SP>, Zm.D, LSL #2]. */
AARCH64_OPND_SVE_ADDR_RZ_LSL3, /* SVE [<Xn|SP>, Zm.D, LSL #3]. */
AARCH64_OPND_SVE_ADDR_RZ_XTW_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW].
Bit 14 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW].
Bit 22 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW1_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #1].
Bit 14 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW1_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #1].
Bit 22 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW2_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #2].
Bit 14 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW2_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #2].
Bit 22 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW3_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #3].
Bit 14 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_RZ_XTW3_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #3].
Bit 22 controls S/U choice. */
AARCH64_OPND_SVE_ADDR_ZI_U5, /* SVE [Zn.<T>, #<uimm5>]. */
AARCH64_OPND_SVE_ADDR_ZI_U5x2, /* SVE [Zn.<T>, #<uimm5>*2]. */
AARCH64_OPND_SVE_ADDR_ZI_U5x4, /* SVE [Zn.<T>, #<uimm5>*4]. */
AARCH64_OPND_SVE_ADDR_ZI_U5x8, /* SVE [Zn.<T>, #<uimm5>*8]. */
AARCH64_OPND_SVE_ADDR_ZZ_LSL, /* SVE [Zn.<T>, Zm.<T>, LSL #<msz>]. */
AARCH64_OPND_SVE_ADDR_ZZ_SXTW, /* SVE [Zn.<T>, Zm.<T>, SXTW #<msz>]. */
AARCH64_OPND_SVE_ADDR_ZZ_UXTW, /* SVE [Zn.<T>, Zm.<T>, UXTW #<msz>]. */
AARCH64_OPND_SVE_PATTERN, /* SVE vector pattern enumeration. */
AARCH64_OPND_SVE_PATTERN_SCALED, /* Likewise, with additional MUL factor. */
AARCH64_OPND_SVE_PRFOP, /* SVE prefetch operation. */

opcodes/ChangeLog

@@ -1,3 +1,45 @@
2016-09-21 Richard Sandiford <richard.sandiford@arm.com>
* aarch64-tbl.h (AARCH64_OPERANDS): Add entries for the new SVE
address operands.
* aarch64-opc.h (FLD_SVE_imm6, FLD_SVE_msz, FLD_SVE_xs_14)
(FLD_SVE_xs_22): New aarch64_field_kinds.
(OPD_F_OD_MASK, OPD_F_OD_LSB, OPD_F_NO_ZR): New flags.
(get_operand_specific_data): New function.
* aarch64-opc.c (fields): Add entries for FLD_SVE_imm6, FLD_SVE_msz,
FLD_SVE_xs_14 and FLD_SVE_xs_22.
(operand_general_constraint_met_p): Handle the new SVE address
operands.
(sve_reg): New array.
(get_addr_sve_reg_name): New function.
(aarch64_print_operand): Handle the new SVE address operands.
* aarch64-opc-2.c: Regenerate.
* aarch64-asm.h (ins_sve_addr_ri_u6, ins_sve_addr_rr_lsl)
(ins_sve_addr_rz_xtw, ins_sve_addr_zi_u5, ins_sve_addr_zz_lsl)
(ins_sve_addr_zz_sxtw, ins_sve_addr_zz_uxtw): New inserters.
* aarch64-asm.c (aarch64_ins_sve_addr_ri_u6): New function.
(aarch64_ins_sve_addr_rr_lsl): Likewise.
(aarch64_ins_sve_addr_rz_xtw): Likewise.
(aarch64_ins_sve_addr_zi_u5): Likewise.
(aarch64_ins_sve_addr_zz): Likewise.
(aarch64_ins_sve_addr_zz_lsl): Likewise.
(aarch64_ins_sve_addr_zz_sxtw): Likewise.
(aarch64_ins_sve_addr_zz_uxtw): Likewise.
* aarch64-asm-2.c: Regenerate.
* aarch64-dis.h (ext_sve_addr_ri_u6, ext_sve_addr_rr_lsl)
(ext_sve_addr_rz_xtw, ext_sve_addr_zi_u5, ext_sve_addr_zz_lsl)
(ext_sve_addr_zz_sxtw, ext_sve_addr_zz_uxtw): New extractors.
* aarch64-dis.c (aarch64_ext_sve_add_reg_imm): New function.
(aarch64_ext_sve_addr_ri_u6): Likewise.
(aarch64_ext_sve_addr_rr_lsl): Likewise.
(aarch64_ext_sve_addr_rz_xtw): Likewise.
(aarch64_ext_sve_addr_zi_u5): Likewise.
(aarch64_ext_sve_addr_zz): Likewise.
(aarch64_ext_sve_addr_zz_lsl): Likewise.
(aarch64_ext_sve_addr_zz_sxtw): Likewise.
(aarch64_ext_sve_addr_zz_uxtw): Likewise.
* aarch64-dis-2.c: Regenerate.
2016-09-21 Richard Sandiford <richard.sandiford@arm.com>
* aarch64-tbl.h (AARCH64_OPERANDS): Add an entry for

opcodes/aarch64-asm-2.c

@@ -480,21 +480,21 @@ aarch64_insert_operand (const aarch64_operand *self,
case 27:
case 35:
case 36:
case 92:
case 93:
case 94:
case 95:
case 96:
case 97:
case 98:
case 99:
case 100:
case 101:
case 102:
case 103:
case 104:
case 105:
case 108:
case 123:
case 124:
case 125:
case 126:
case 127:
case 128:
case 129:
case 130:
case 131:
case 132:
case 133:
case 134:
case 135:
case 136:
case 139:
return aarch64_ins_regno (self, info, code, inst);
case 12:
return aarch64_ins_reg_extended (self, info, code, inst);
@@ -531,8 +531,8 @@ aarch64_insert_operand (const aarch64_operand *self,
case 68:
case 69:
case 70:
case 89:
case 91:
case 120:
case 122:
return aarch64_ins_imm (self, info, code, inst);
case 38:
case 39:
@@ -583,12 +583,50 @@ aarch64_insert_operand (const aarch64_operand *self,
return aarch64_ins_prfop (self, info, code, inst);
case 88:
return aarch64_ins_hint (self, info, code, inst);
case 89:
case 90:
return aarch64_ins_sve_scale (self, info, code, inst);
case 91:
case 92:
return aarch64_ins_sve_addr_ri_u6 (self, info, code, inst);
case 93:
case 94:
case 95:
case 96:
case 97:
case 98:
case 99:
case 100:
case 101:
case 102:
case 103:
case 104:
return aarch64_ins_sve_addr_rr_lsl (self, info, code, inst);
case 105:
case 106:
return aarch64_ins_sve_index (self, info, code, inst);
case 107:
case 108:
case 109:
case 110:
case 111:
case 112:
return aarch64_ins_sve_addr_rz_xtw (self, info, code, inst);
case 113:
case 114:
case 115:
case 116:
return aarch64_ins_sve_addr_zi_u5 (self, info, code, inst);
case 117:
return aarch64_ins_sve_addr_zz_lsl (self, info, code, inst);
case 118:
return aarch64_ins_sve_addr_zz_sxtw (self, info, code, inst);
case 119:
return aarch64_ins_sve_addr_zz_uxtw (self, info, code, inst);
case 121:
return aarch64_ins_sve_scale (self, info, code, inst);
case 137:
return aarch64_ins_sve_index (self, info, code, inst);
case 138:
case 140:
return aarch64_ins_sve_reglist (self, info, code, inst);
default: assert (0); abort ();
}

opcodes/aarch64-asm.c

@@ -745,6 +745,114 @@ aarch64_ins_reg_shifted (const aarch64_operand *self ATTRIBUTE_UNUSED,
return NULL;
}
/* Encode an SVE address [X<n>, #<SVE_imm6> << <shift>], where <SVE_imm6>
is a 6-bit unsigned number and where <shift> is SELF's operand-dependent
value. fields[0] specifies the base register field. */
const char *
aarch64_ins_sve_addr_ri_u6 (const aarch64_operand *self,
const aarch64_opnd_info *info, aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
int factor = 1 << get_operand_specific_data (self);
insert_field (self->fields[0], code, info->addr.base_regno, 0);
insert_field (FLD_SVE_imm6, code, info->addr.offset.imm / factor, 0);
return NULL;
}
/* Encode an SVE address [X<n>, X<m>{, LSL #<shift>}], where <shift>
is SELF's operand-dependent value. fields[0] specifies the base
register field and fields[1] specifies the offset register field. */
const char *
aarch64_ins_sve_addr_rr_lsl (const aarch64_operand *self,
const aarch64_opnd_info *info, aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
insert_field (self->fields[0], code, info->addr.base_regno, 0);
insert_field (self->fields[1], code, info->addr.offset.regno, 0);
return NULL;
}
/* Encode an SVE address [X<n>, Z<m>.<T>, (S|U)XTW {#<shift>}], where
<shift> is SELF's operand-dependent value. fields[0] specifies the
base register field, fields[1] specifies the offset register field and
fields[2] is a single-bit field that selects SXTW over UXTW. */
const char *
aarch64_ins_sve_addr_rz_xtw (const aarch64_operand *self,
const aarch64_opnd_info *info, aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
insert_field (self->fields[0], code, info->addr.base_regno, 0);
insert_field (self->fields[1], code, info->addr.offset.regno, 0);
if (info->shifter.kind == AARCH64_MOD_UXTW)
insert_field (self->fields[2], code, 0, 0);
else
insert_field (self->fields[2], code, 1, 0);
return NULL;
}
/* Encode an SVE address [Z<n>.<T>, #<imm5> << <shift>], where <imm5> is a
5-bit unsigned number and where <shift> is SELF's operand-dependent value.
fields[0] specifies the base register field. */
const char *
aarch64_ins_sve_addr_zi_u5 (const aarch64_operand *self,
const aarch64_opnd_info *info, aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
int factor = 1 << get_operand_specific_data (self);
insert_field (self->fields[0], code, info->addr.base_regno, 0);
insert_field (FLD_imm5, code, info->addr.offset.imm / factor, 0);
return NULL;
}
/* Encode an SVE address [Z<n>.<T>, Z<m>.<T>{, <modifier> {#<msz>}}],
where <modifier> is fixed by the instruction and where <msz> is a
2-bit unsigned number. fields[0] specifies the base register field
and fields[1] specifies the offset register field. */
static const char *
aarch64_ext_sve_addr_zz (const aarch64_operand *self,
const aarch64_opnd_info *info, aarch64_insn *code)
{
insert_field (self->fields[0], code, info->addr.base_regno, 0);
insert_field (self->fields[1], code, info->addr.offset.regno, 0);
insert_field (FLD_SVE_msz, code, info->shifter.amount, 0);
return NULL;
}
/* Encode an SVE address [Z<n>.<T>, Z<m>.<T>{, LSL #<msz>}], where
<msz> is a 2-bit unsigned number. fields[0] specifies the base register
field and fields[1] specifies the offset register field. */
const char *
aarch64_ins_sve_addr_zz_lsl (const aarch64_operand *self,
const aarch64_opnd_info *info, aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
return aarch64_ext_sve_addr_zz (self, info, code);
}
/* Encode an SVE address [Z<n>.<T>, Z<m>.<T>, SXTW {#<msz>}], where
<msz> is a 2-bit unsigned number. fields[0] specifies the base register
field and fields[1] specifies the offset register field. */
const char *
aarch64_ins_sve_addr_zz_sxtw (const aarch64_operand *self,
const aarch64_opnd_info *info,
aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
return aarch64_ext_sve_addr_zz (self, info, code);
}
/* Encode an SVE address [Z<n>.<T>, Z<m>.<T>, UXTW {#<msz>}], where
<msz> is a 2-bit unsigned number. fields[0] specifies the base register
field and fields[1] specifies the offset register field. */
const char *
aarch64_ins_sve_addr_zz_uxtw (const aarch64_operand *self,
const aarch64_opnd_info *info,
aarch64_insn *code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
return aarch64_ext_sve_addr_zz (self, info, code);
}
/* Encode Zn[MM], where MM has a 7-bit triangular encoding. The fields
array specifies which field to use for Zn. MM is encoded in the
concatenation of imm5 and SVE_tszh, with imm5 being the less

opcodes/aarch64-asm.h

@@ -69,6 +69,13 @@ AARCH64_DECL_OPD_INSERTER (ins_hint);
AARCH64_DECL_OPD_INSERTER (ins_prfop);
AARCH64_DECL_OPD_INSERTER (ins_reg_extended);
AARCH64_DECL_OPD_INSERTER (ins_reg_shifted);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_ri_u6);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_rr_lsl);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_rz_xtw);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_zi_u5);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_zz_lsl);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_zz_sxtw);
AARCH64_DECL_OPD_INSERTER (ins_sve_addr_zz_uxtw);
AARCH64_DECL_OPD_INSERTER (ins_sve_index);
AARCH64_DECL_OPD_INSERTER (ins_sve_reglist);
AARCH64_DECL_OPD_INSERTER (ins_sve_scale);

opcodes/aarch64-dis-2.c

@@ -10426,21 +10426,21 @@ aarch64_extract_operand (const aarch64_operand *self,
case 27:
case 35:
case 36:
case 92:
case 93:
case 94:
case 95:
case 96:
case 97:
case 98:
case 99:
case 100:
case 101:
case 102:
case 103:
case 104:
case 105:
case 108:
case 123:
case 124:
case 125:
case 126:
case 127:
case 128:
case 129:
case 130:
case 131:
case 132:
case 133:
case 134:
case 135:
case 136:
case 139:
return aarch64_ext_regno (self, info, code, inst);
case 8:
return aarch64_ext_regrt_sysins (self, info, code, inst);
@@ -10482,8 +10482,8 @@ aarch64_extract_operand (const aarch64_operand *self,
case 68:
case 69:
case 70:
case 89:
case 91:
case 120:
case 122:
return aarch64_ext_imm (self, info, code, inst);
case 38:
case 39:
@@ -10536,12 +10536,50 @@ aarch64_extract_operand (const aarch64_operand *self,
return aarch64_ext_prfop (self, info, code, inst);
case 88:
return aarch64_ext_hint (self, info, code, inst);
case 89:
case 90:
return aarch64_ext_sve_scale (self, info, code, inst);
case 91:
case 92:
return aarch64_ext_sve_addr_ri_u6 (self, info, code, inst);
case 93:
case 94:
case 95:
case 96:
case 97:
case 98:
case 99:
case 100:
case 101:
case 102:
case 103:
case 104:
return aarch64_ext_sve_addr_rr_lsl (self, info, code, inst);
case 105:
case 106:
return aarch64_ext_sve_index (self, info, code, inst);
case 107:
case 108:
case 109:
case 110:
case 111:
case 112:
return aarch64_ext_sve_addr_rz_xtw (self, info, code, inst);
case 113:
case 114:
case 115:
case 116:
return aarch64_ext_sve_addr_zi_u5 (self, info, code, inst);
case 117:
return aarch64_ext_sve_addr_zz_lsl (self, info, code, inst);
case 118:
return aarch64_ext_sve_addr_zz_sxtw (self, info, code, inst);
case 119:
return aarch64_ext_sve_addr_zz_uxtw (self, info, code, inst);
case 121:
return aarch64_ext_sve_scale (self, info, code, inst);
case 137:
return aarch64_ext_sve_index (self, info, code, inst);
case 138:
case 140:
return aarch64_ext_sve_reglist (self, info, code, inst);
default: assert (0); abort ();
}

opcodes/aarch64-dis.c

@@ -1186,6 +1186,152 @@ aarch64_ext_reg_shifted (const aarch64_operand *self ATTRIBUTE_UNUSED,
return 1;
}
/* Decode an SVE address [<base>, #<offset> << <shift>], where <offset>
is given by the OFFSET parameter and where <shift> is SELF's operand-
dependent value. fields[0] specifies the base register field <base>. */
static int
aarch64_ext_sve_addr_reg_imm (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
int64_t offset)
{
info->addr.base_regno = extract_field (self->fields[0], code, 0);
info->addr.offset.imm = offset * (1 << get_operand_specific_data (self));
info->addr.offset.is_reg = FALSE;
info->addr.writeback = FALSE;
info->addr.preind = TRUE;
info->shifter.operator_present = FALSE;
info->shifter.amount_present = FALSE;
return 1;
}
/* Decode an SVE address [X<n>, #<SVE_imm6> << <shift>], where <SVE_imm6>
is a 6-bit unsigned number and where <shift> is SELF's operand-dependent
value. fields[0] specifies the base register field. */
int
aarch64_ext_sve_addr_ri_u6 (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
int offset = extract_field (FLD_SVE_imm6, code, 0);
return aarch64_ext_sve_addr_reg_imm (self, info, code, offset);
}
/* Decode an SVE address [X<n>, X<m>{, LSL #<shift>}], where <shift>
is SELF's operand-dependent value. fields[0] specifies the base
register field and fields[1] specifies the offset register field. */
int
aarch64_ext_sve_addr_rr_lsl (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
int index;
index = extract_field (self->fields[1], code, 0);
if (index == 31 && (self->flags & OPD_F_NO_ZR) != 0)
return 0;
info->addr.base_regno = extract_field (self->fields[0], code, 0);
info->addr.offset.regno = index;
info->addr.offset.is_reg = TRUE;
info->addr.writeback = FALSE;
info->addr.preind = TRUE;
info->shifter.kind = AARCH64_MOD_LSL;
info->shifter.amount = get_operand_specific_data (self);
info->shifter.operator_present = (info->shifter.amount != 0);
info->shifter.amount_present = (info->shifter.amount != 0);
return 1;
}
/* Decode an SVE address [X<n>, Z<m>.<T>, (S|U)XTW {#<shift>}], where
<shift> is SELF's operand-dependent value. fields[0] specifies the
base register field, fields[1] specifies the offset register field and
fields[2] is a single-bit field that selects SXTW over UXTW. */
int
aarch64_ext_sve_addr_rz_xtw (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
info->addr.base_regno = extract_field (self->fields[0], code, 0);
info->addr.offset.regno = extract_field (self->fields[1], code, 0);
info->addr.offset.is_reg = TRUE;
info->addr.writeback = FALSE;
info->addr.preind = TRUE;
if (extract_field (self->fields[2], code, 0))
info->shifter.kind = AARCH64_MOD_SXTW;
else
info->shifter.kind = AARCH64_MOD_UXTW;
info->shifter.amount = get_operand_specific_data (self);
info->shifter.operator_present = TRUE;
info->shifter.amount_present = (info->shifter.amount != 0);
return 1;
}
/* Decode an SVE address [Z<n>.<T>, #<imm5> << <shift>], where <imm5> is a
5-bit unsigned number and where <shift> is SELF's operand-dependent value.
fields[0] specifies the base register field. */
int
aarch64_ext_sve_addr_zi_u5 (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
int offset = extract_field (FLD_imm5, code, 0);
return aarch64_ext_sve_addr_reg_imm (self, info, code, offset);
}
/* Decode an SVE address [Z<n>.<T>, Z<m>.<T>{, <modifier> {#<msz>}}],
where <modifier> is given by KIND and where <msz> is a 2-bit unsigned
number. fields[0] specifies the base register field and fields[1]
specifies the offset register field. */
static int
aarch64_ext_sve_addr_zz (const aarch64_operand *self, aarch64_opnd_info *info,
aarch64_insn code, enum aarch64_modifier_kind kind)
{
info->addr.base_regno = extract_field (self->fields[0], code, 0);
info->addr.offset.regno = extract_field (self->fields[1], code, 0);
info->addr.offset.is_reg = TRUE;
info->addr.writeback = FALSE;
info->addr.preind = TRUE;
info->shifter.kind = kind;
info->shifter.amount = extract_field (FLD_SVE_msz, code, 0);
info->shifter.operator_present = (kind != AARCH64_MOD_LSL
|| info->shifter.amount != 0);
info->shifter.amount_present = (info->shifter.amount != 0);
return 1;
}
/* Decode an SVE address [Z<n>.<T>, Z<m>.<T>{, LSL #<msz>}], where
<msz> is a 2-bit unsigned number. fields[0] specifies the base register
field and fields[1] specifies the offset register field. */
int
aarch64_ext_sve_addr_zz_lsl (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
return aarch64_ext_sve_addr_zz (self, info, code, AARCH64_MOD_LSL);
}
/* Decode an SVE address [Z<n>.<T>, Z<m>.<T>, SXTW {#<msz>}], where
<msz> is a 2-bit unsigned number. fields[0] specifies the base register
field and fields[1] specifies the offset register field. */
int
aarch64_ext_sve_addr_zz_sxtw (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
return aarch64_ext_sve_addr_zz (self, info, code, AARCH64_MOD_SXTW);
}
/* Decode an SVE address [Z<n>.<T>, Z<m>.<T>, UXTW {#<msz>}], where
<msz> is a 2-bit unsigned number. fields[0] specifies the base register
field and fields[1] specifies the offset register field. */
int
aarch64_ext_sve_addr_zz_uxtw (const aarch64_operand *self,
aarch64_opnd_info *info, aarch64_insn code,
const aarch64_inst *inst ATTRIBUTE_UNUSED)
{
return aarch64_ext_sve_addr_zz (self, info, code, AARCH64_MOD_UXTW);
}
/* Decode Zn[MM], where MM has a 7-bit triangular encoding. The fields
array specifies which field to use for Zn. MM is encoded in the
concatenation of imm5 and SVE_tszh, with imm5 being the less

opcodes/aarch64-dis.h

@@ -91,6 +91,13 @@ AARCH64_DECL_OPD_EXTRACTOR (ext_hint);
AARCH64_DECL_OPD_EXTRACTOR (ext_prfop);
AARCH64_DECL_OPD_EXTRACTOR (ext_reg_extended);
AARCH64_DECL_OPD_EXTRACTOR (ext_reg_shifted);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_ri_u6);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_rr_lsl);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_rz_xtw);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_zi_u5);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_zz_lsl);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_zz_sxtw);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_addr_zz_uxtw);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_index);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_reglist);
AARCH64_DECL_OPD_EXTRACTOR (ext_sve_scale);

opcodes/aarch64-opc-2.c

@@ -113,6 +113,37 @@ const struct aarch64_operand aarch64_operands[] =
{AARCH64_OPND_CLASS_SYSTEM, "BARRIER_ISB", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {}, "the ISB option name SY or an optional 4-bit unsigned immediate"},
{AARCH64_OPND_CLASS_SYSTEM, "PRFOP", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {}, "a prefetch operation specifier"},
{AARCH64_OPND_CLASS_SYSTEM, "BARRIER_PSB", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {}, "the PSB option name CSYNC"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RI_U6", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn}, "an address with a 6-bit unsigned offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RI_U6x2", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn}, "an address with a 6-bit unsigned offset, multiplied by 2"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RI_U6x4", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn}, "an address with a 6-bit unsigned offset, multiplied by 4"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RI_U6x8", 3 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn}, "an address with a 6-bit unsigned offset, multiplied by 8"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RR", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RR_LSL1", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RR_LSL2", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RR_LSL3", 3 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX", (0 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX_LSL1", (1 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX_LSL2", (2 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX_LSL3", (3 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_LSL1", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_LSL2", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_LSL3", 3 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW_14", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW_22", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW1_14", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW1_22", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW2_14", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW2_22", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW3_14", 3 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_XTW3_22", 3 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZI_U5", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn}, "an address with a 5-bit unsigned offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZI_U5x2", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn}, "an address with a 5-bit unsigned offset, multiplied by 2"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZI_U5x4", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn}, "an address with a 5-bit unsigned offset, multiplied by 4"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZI_U5x8", 3 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn}, "an address with a 5-bit unsigned offset, multiplied by 8"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZZ_LSL", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZZ_SXTW", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZZ_UXTW", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_IMMEDIATE, "SVE_PATTERN", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_pattern}, "an enumeration value such as POW2"},
{AARCH64_OPND_CLASS_IMMEDIATE, "SVE_PATTERN_SCALED", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_pattern}, "an enumeration value such as POW2"},
{AARCH64_OPND_CLASS_IMMEDIATE, "SVE_PRFOP", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_prfop}, "an enumeration value such as PLDL1KEEP"},

opcodes/aarch64-opc.c

@@ -280,9 +280,13 @@ const aarch64_field fields[] =
{ 5, 5 }, /* SVE_Zn: SVE vector register, bits [9,5]. */
{ 0, 5 }, /* SVE_Zt: SVE vector register, bits [4,0]. */
{ 16, 4 }, /* SVE_imm4: 4-bit immediate field. */
{ 16, 6 }, /* SVE_imm6: 6-bit immediate field. */
{ 10, 2 }, /* SVE_msz: 2-bit shift amount for ADR. */
{ 5, 5 }, /* SVE_pattern: vector pattern enumeration. */
{ 0, 4 }, /* SVE_prfop: prefetch operation for SVE PRF[BHWD]. */
{ 22, 2 }, /* SVE_tszh: triangular size select high, bits [23,22]. */
{ 14, 1 }, /* SVE_xs_14: UXTW/SXTW select (bit 14). */
{ 22, 1 } /* SVE_xs_22: UXTW/SXTW select (bit 22). */
};
enum aarch64_operand_class
@@ -1368,9 +1372,9 @@ operand_general_constraint_met_p (const aarch64_opnd_info *opnds, int idx,
const aarch64_opcode *opcode,
aarch64_operand_error *mismatch_detail)
{
unsigned num;
unsigned num, modifiers;
unsigned char size;
int64_t imm;
int64_t imm, min_value, max_value;
const aarch64_opnd_info *opnd = opnds + idx;
aarch64_opnd_qualifier_t qualifier = opnd->qualifier;
@@ -1662,6 +1666,113 @@ operand_general_constraint_met_p (const aarch64_opnd_info *opnds, int idx,
}
break;
case AARCH64_OPND_SVE_ADDR_RI_U6:
case AARCH64_OPND_SVE_ADDR_RI_U6x2:
case AARCH64_OPND_SVE_ADDR_RI_U6x4:
case AARCH64_OPND_SVE_ADDR_RI_U6x8:
min_value = 0;
max_value = 63;
sve_imm_offset:
assert (!opnd->addr.offset.is_reg);
assert (opnd->addr.preind);
num = 1 << get_operand_specific_data (&aarch64_operands[type]);
min_value *= num;
max_value *= num;
if (opnd->shifter.operator_present
|| opnd->shifter.amount_present)
{
set_other_error (mismatch_detail, idx,
_("invalid addressing mode"));
return 0;
}
if (!value_in_range_p (opnd->addr.offset.imm, min_value, max_value))
{
set_offset_out_of_range_error (mismatch_detail, idx,
min_value, max_value);
return 0;
}
if (!value_aligned_p (opnd->addr.offset.imm, num))
{
set_unaligned_error (mismatch_detail, idx, num);
return 0;
}
break;
case AARCH64_OPND_SVE_ADDR_RR:
case AARCH64_OPND_SVE_ADDR_RR_LSL1:
case AARCH64_OPND_SVE_ADDR_RR_LSL2:
case AARCH64_OPND_SVE_ADDR_RR_LSL3:
case AARCH64_OPND_SVE_ADDR_RX:
case AARCH64_OPND_SVE_ADDR_RX_LSL1:
case AARCH64_OPND_SVE_ADDR_RX_LSL2:
case AARCH64_OPND_SVE_ADDR_RX_LSL3:
case AARCH64_OPND_SVE_ADDR_RZ:
case AARCH64_OPND_SVE_ADDR_RZ_LSL1:
case AARCH64_OPND_SVE_ADDR_RZ_LSL2:
case AARCH64_OPND_SVE_ADDR_RZ_LSL3:
modifiers = 1 << AARCH64_MOD_LSL;
sve_rr_operand:
assert (opnd->addr.offset.is_reg);
assert (opnd->addr.preind);
if ((aarch64_operands[type].flags & OPD_F_NO_ZR) != 0
&& opnd->addr.offset.regno == 31)
{
set_other_error (mismatch_detail, idx,
_("index register xzr is not allowed"));
return 0;
}
if (((1 << opnd->shifter.kind) & modifiers) == 0
|| (opnd->shifter.amount
!= get_operand_specific_data (&aarch64_operands[type])))
{
set_other_error (mismatch_detail, idx,
_("invalid addressing mode"));
return 0;
}
break;
case AARCH64_OPND_SVE_ADDR_RZ_XTW_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW1_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW1_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW2_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW2_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW3_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW3_22:
modifiers = (1 << AARCH64_MOD_SXTW) | (1 << AARCH64_MOD_UXTW);
goto sve_rr_operand;
case AARCH64_OPND_SVE_ADDR_ZI_U5:
case AARCH64_OPND_SVE_ADDR_ZI_U5x2:
case AARCH64_OPND_SVE_ADDR_ZI_U5x4:
case AARCH64_OPND_SVE_ADDR_ZI_U5x8:
min_value = 0;
max_value = 31;
goto sve_imm_offset;
case AARCH64_OPND_SVE_ADDR_ZZ_LSL:
modifiers = 1 << AARCH64_MOD_LSL;
sve_zz_operand:
assert (opnd->addr.offset.is_reg);
assert (opnd->addr.preind);
if (((1 << opnd->shifter.kind) & modifiers) == 0
|| opnd->shifter.amount < 0
|| opnd->shifter.amount > 3)
{
set_other_error (mismatch_detail, idx,
_("invalid addressing mode"));
return 0;
}
break;
case AARCH64_OPND_SVE_ADDR_ZZ_SXTW:
modifiers = (1 << AARCH64_MOD_SXTW);
goto sve_zz_operand;
case AARCH64_OPND_SVE_ADDR_ZZ_UXTW:
modifiers = 1 << AARCH64_MOD_UXTW;
goto sve_zz_operand;
default:
break;
}
@@ -2330,6 +2441,17 @@ static const char *int_reg[2][2][32] = {
#undef R64
#undef R32
};
/* Names of the SVE vector registers, first with .S suffixes,
then with .D suffixes. */
static const char *sve_reg[2][32] = {
#define ZS(X) "z" #X ".s"
#define ZD(X) "z" #X ".d"
BANK (ZS, ZS (31)), BANK (ZD, ZD (31))
#undef ZD
#undef ZS
};
#undef BANK
/* Return the integer register name.
@@ -2373,6 +2495,17 @@ get_offset_int_reg_name (const aarch64_opnd_info *opnd)
}
}
/* Get the name of the SVE vector register REGNO, using QUALIFIER to decide
whether the suffix should be .S or .D. */
static inline const char *
get_addr_sve_reg_name (int regno, aarch64_opnd_qualifier_t qualifier)
{
assert (qualifier == AARCH64_OPND_QLF_S_S
|| qualifier == AARCH64_OPND_QLF_S_D);
return sve_reg[qualifier == AARCH64_OPND_QLF_S_D][regno];
}
/* Types for expanding an encoded 8-bit value to a floating-point value. */
typedef union
@@ -2948,18 +3081,65 @@ aarch64_print_operand (char *buf, size_t size, bfd_vma pc,
break;
case AARCH64_OPND_ADDR_REGOFF:
case AARCH64_OPND_SVE_ADDR_RR:
case AARCH64_OPND_SVE_ADDR_RR_LSL1:
case AARCH64_OPND_SVE_ADDR_RR_LSL2:
case AARCH64_OPND_SVE_ADDR_RR_LSL3:
case AARCH64_OPND_SVE_ADDR_RX:
case AARCH64_OPND_SVE_ADDR_RX_LSL1:
case AARCH64_OPND_SVE_ADDR_RX_LSL2:
case AARCH64_OPND_SVE_ADDR_RX_LSL3:
print_register_offset_address
(buf, size, opnd, get_64bit_int_reg_name (opnd->addr.base_regno, 1),
get_offset_int_reg_name (opnd));
break;
case AARCH64_OPND_SVE_ADDR_RZ:
case AARCH64_OPND_SVE_ADDR_RZ_LSL1:
case AARCH64_OPND_SVE_ADDR_RZ_LSL2:
case AARCH64_OPND_SVE_ADDR_RZ_LSL3:
case AARCH64_OPND_SVE_ADDR_RZ_XTW_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW1_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW1_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW2_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW2_22:
case AARCH64_OPND_SVE_ADDR_RZ_XTW3_14:
case AARCH64_OPND_SVE_ADDR_RZ_XTW3_22:
print_register_offset_address
(buf, size, opnd, get_64bit_int_reg_name (opnd->addr.base_regno, 1),
get_addr_sve_reg_name (opnd->addr.offset.regno, opnd->qualifier));
break;
case AARCH64_OPND_ADDR_SIMM7:
case AARCH64_OPND_ADDR_SIMM9:
case AARCH64_OPND_ADDR_SIMM9_2:
case AARCH64_OPND_SVE_ADDR_RI_U6:
case AARCH64_OPND_SVE_ADDR_RI_U6x2:
case AARCH64_OPND_SVE_ADDR_RI_U6x4:
case AARCH64_OPND_SVE_ADDR_RI_U6x8:
print_immediate_offset_address
(buf, size, opnd, get_64bit_int_reg_name (opnd->addr.base_regno, 1));
break;
case AARCH64_OPND_SVE_ADDR_ZI_U5:
case AARCH64_OPND_SVE_ADDR_ZI_U5x2:
case AARCH64_OPND_SVE_ADDR_ZI_U5x4:
case AARCH64_OPND_SVE_ADDR_ZI_U5x8:
print_immediate_offset_address
(buf, size, opnd,
get_addr_sve_reg_name (opnd->addr.base_regno, opnd->qualifier));
break;
case AARCH64_OPND_SVE_ADDR_ZZ_LSL:
case AARCH64_OPND_SVE_ADDR_ZZ_SXTW:
case AARCH64_OPND_SVE_ADDR_ZZ_UXTW:
print_register_offset_address
(buf, size, opnd,
get_addr_sve_reg_name (opnd->addr.base_regno, opnd->qualifier),
get_addr_sve_reg_name (opnd->addr.offset.regno, opnd->qualifier));
break;
case AARCH64_OPND_ADDR_UIMM12:
name = get_64bit_int_reg_name (opnd->addr.base_regno, 1);
if (opnd->addr.offset.imm)

opcodes/aarch64-opc.h

@@ -107,9 +107,13 @@ enum aarch64_field_kind
FLD_SVE_Zn,
FLD_SVE_Zt,
FLD_SVE_imm4,
FLD_SVE_imm6,
FLD_SVE_msz,
FLD_SVE_pattern,
FLD_SVE_prfop,
FLD_SVE_tszh,
FLD_SVE_xs_14,
FLD_SVE_xs_22,
};
/* Field description. */
@@ -156,6 +160,9 @@ extern const aarch64_operand aarch64_operands[];
value by 2 to get the value
of an immediate operand. */
#define OPD_F_MAYBE_SP 0x00000010 /* May potentially be SP. */
#define OPD_F_OD_MASK 0x00000060 /* Operand-dependent data. */
#define OPD_F_OD_LSB 5
#define OPD_F_NO_ZR 0x00000080 /* ZR index not allowed. */
static inline bfd_boolean
operand_has_inserter (const aarch64_operand *operand)
@@ -187,6 +194,13 @@ operand_maybe_stack_pointer (const aarch64_operand *operand)
return (operand->flags & OPD_F_MAYBE_SP) ? TRUE : FALSE;
}
/* Return the value of the operand-specific data field (OPD_F_OD_MASK). */
static inline unsigned int
get_operand_specific_data (const aarch64_operand *operand)
{
return (operand->flags & OPD_F_OD_MASK) >> OPD_F_OD_LSB;
}
/* Return the total width of the operand *OPERAND. */
static inline unsigned
get_operand_fields_width (const aarch64_operand *operand)

opcodes/aarch64-tbl.h

@@ -2818,8 +2818,95 @@ struct aarch64_opcode aarch64_opcode_table[] =
"the ISB option name SY or an optional 4-bit unsigned immediate") \
Y(SYSTEM, prfop, "PRFOP", 0, F(), \
"a prefetch operation specifier") \
Y (SYSTEM, hint, "BARRIER_PSB", 0, F (), \
Y(SYSTEM, hint, "BARRIER_PSB", 0, F (), \
"the PSB option name CSYNC") \
Y(ADDRESS, sve_addr_ri_u6, "SVE_ADDR_RI_U6", 0 << OPD_F_OD_LSB, \
F(FLD_Rn), "an address with a 6-bit unsigned offset") \
Y(ADDRESS, sve_addr_ri_u6, "SVE_ADDR_RI_U6x2", 1 << OPD_F_OD_LSB, \
F(FLD_Rn), \
"an address with a 6-bit unsigned offset, multiplied by 2") \
Y(ADDRESS, sve_addr_ri_u6, "SVE_ADDR_RI_U6x4", 2 << OPD_F_OD_LSB, \
F(FLD_Rn), \
"an address with a 6-bit unsigned offset, multiplied by 4") \
Y(ADDRESS, sve_addr_ri_u6, "SVE_ADDR_RI_U6x8", 3 << OPD_F_OD_LSB, \
F(FLD_Rn), \
"an address with a 6-bit unsigned offset, multiplied by 8") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RR", 0 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_Rm), "an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RR_LSL1", 1 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_Rm), "an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RR_LSL2", 2 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_Rm), "an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RR_LSL3", 3 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_Rm), "an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RX", \
(0 << OPD_F_OD_LSB) | OPD_F_NO_ZR, F(FLD_Rn,FLD_Rm), \
"an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RX_LSL1", \
(1 << OPD_F_OD_LSB) | OPD_F_NO_ZR, F(FLD_Rn,FLD_Rm), \
"an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RX_LSL2", \
(2 << OPD_F_OD_LSB) | OPD_F_NO_ZR, F(FLD_Rn,FLD_Rm), \
"an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RX_LSL3", \
(3 << OPD_F_OD_LSB) | OPD_F_NO_ZR, F(FLD_Rn,FLD_Rm), \
"an address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RZ", 0 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RZ_LSL1", 1 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RZ_LSL2", 2 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RZ_LSL3", 3 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW_14", \
0 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW_22", \
0 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW1_14", \
1 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW1_22", \
1 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW2_14", \
2 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW2_22", \
2 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW3_14", \
3 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_14), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_rz_xtw, "SVE_ADDR_RZ_XTW3_22", \
3 << OPD_F_OD_LSB, F(FLD_Rn,FLD_SVE_Zm_16,FLD_SVE_xs_22), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_zi_u5, "SVE_ADDR_ZI_U5", 0 << OPD_F_OD_LSB, \
F(FLD_SVE_Zn), "an address with a 5-bit unsigned offset") \
Y(ADDRESS, sve_addr_zi_u5, "SVE_ADDR_ZI_U5x2", 1 << OPD_F_OD_LSB, \
F(FLD_SVE_Zn), \
"an address with a 5-bit unsigned offset, multiplied by 2") \
Y(ADDRESS, sve_addr_zi_u5, "SVE_ADDR_ZI_U5x4", 2 << OPD_F_OD_LSB, \
F(FLD_SVE_Zn), \
"an address with a 5-bit unsigned offset, multiplied by 4") \
Y(ADDRESS, sve_addr_zi_u5, "SVE_ADDR_ZI_U5x8", 3 << OPD_F_OD_LSB, \
F(FLD_SVE_Zn), \
"an address with a 5-bit unsigned offset, multiplied by 8") \
Y(ADDRESS, sve_addr_zz_lsl, "SVE_ADDR_ZZ_LSL", 0, \
F(FLD_SVE_Zn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_zz_sxtw, "SVE_ADDR_ZZ_SXTW", 0, \
F(FLD_SVE_Zn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(ADDRESS, sve_addr_zz_uxtw, "SVE_ADDR_ZZ_UXTW", 0, \
F(FLD_SVE_Zn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \
Y(IMMEDIATE, imm, "SVE_PATTERN", 0, F(FLD_SVE_pattern), \
"an enumeration value such as POW2") \
Y(IMMEDIATE, sve_scale, "SVE_PATTERN_SCALED", 0, \