X-Git-Url: http://git.efficios.com/?a=blobdiff_plain;f=opcodes%2Faarch64-tbl.h;h=48872e4c373bbcf344b3dc34bec20872b5972e58;hb=441af85bd9c68dbc0c2a1dbe23bf07c6cb3c3f5d;hp=dc7661a9689acfa547dd041315ca5f61904f45a2;hpb=1f4cd317b6606fcb2a582a3fc1c1f9437ef7d7b6;p=deliverable%2Fbinutils-gdb.git

diff --git a/opcodes/aarch64-tbl.h b/opcodes/aarch64-tbl.h
index dc7661a968..48872e4c37 100644
--- a/opcodes/aarch64-tbl.h
+++ b/opcodes/aarch64-tbl.h
@@ -1,6 +1,6 @@
 /* aarch64-tbl.h -- AArch64 opcode description table and instruction
    operand description table.
-   Copyright (C) 2012-2019 Free Software Foundation, Inc.
+   Copyright (C) 2012-2020 Free Software Foundation, Inc.
 
    This file is part of the GNU opcodes library.
 
@@ -2324,9 +2324,6 @@ static const aarch64_feature_set aarch64_feature_fp =
   AARCH64_FEATURE (AARCH64_FEATURE_FP, 0);
 static const aarch64_feature_set aarch64_feature_simd =
   AARCH64_FEATURE (AARCH64_FEATURE_SIMD, 0);
-static const aarch64_feature_set aarch64_feature_crypto =
-  AARCH64_FEATURE (AARCH64_FEATURE_CRYPTO | AARCH64_FEATURE_AES
-                   | AARCH64_FEATURE_SHA2 | AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
 static const aarch64_feature_set aarch64_feature_crc =
   AARCH64_FEATURE (AARCH64_FEATURE_CRC, 0);
 static const aarch64_feature_set aarch64_feature_lse =
@@ -2363,9 +2360,6 @@ static const aarch64_feature_set aarch64_feature_aes =
   AARCH64_FEATURE (AARCH64_FEATURE_V8 | AARCH64_FEATURE_AES, 0);
 static const aarch64_feature_set aarch64_feature_v8_4 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8_4, 0);
-static const aarch64_feature_set aarch64_feature_crypto_v8_2 =
-  AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_CRYPTO
-                   | AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
 static const aarch64_feature_set aarch64_feature_sm4 =
   AARCH64_FEATURE (AARCH64_FEATURE_V8_2 | AARCH64_FEATURE_SM4
                    | AARCH64_FEATURE_SIMD | AARCH64_FEATURE_FP, 0);
@@ -2423,7 +2417,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
 #define CORE &aarch64_feature_v8
 #define FP &aarch64_feature_fp
 #define SIMD &aarch64_feature_simd
-#define CRYPTO &aarch64_feature_crypto
 #define CRC &aarch64_feature_crc
 #define LSE &aarch64_feature_lse
 #define LOR &aarch64_feature_lor
@@ -2443,7 +2436,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
 #define ARMV8_4 &aarch64_feature_v8_4
 #define SHA3 &aarch64_feature_sha3
 #define SM4 &aarch64_feature_sm4
-#define CRYPTO_V8_2 &aarch64_feature_crypto_v8_2
 #define FP_F16_V8_2 &aarch64_feature_fp_16_v8_2
 #define DOTPROD &aarch64_feature_dotprod
 #define ARMV8_5 &aarch64_feature_v8_5
@@ -2476,8 +2468,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
   { NAME, OPCODE, MASK, CLASS, OP, SIMD, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define _SIMD_INSN(NAME,OPCODE,MASK,CLASS,OP,OPS,QUALS,FLAGS,VERIFIER) \
   { NAME, OPCODE, MASK, CLASS, OP, SIMD, OPS, QUALS, FLAGS, 0, 0, VERIFIER }
-#define CRYP_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
-  { NAME, OPCODE, MASK, CLASS, 0, CRYPTO, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define _CRC_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, CRC, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define _LSE_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
@@ -2510,8 +2500,6 @@ static const aarch64_feature_set aarch64_feature_f64mm_sve =
   { NAME, OPCODE, MASK, CLASS, 0, AES, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define V8_4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, ARMV8_4, OPS, QUALS, FLAGS, 0, 0, NULL }
-#define CRYPTO_V8_2_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
-  { NAME, OPCODE, MASK, CLASS, 0, CRYPTO_V8_2, OPS, QUALS, FLAGS, 0, NULL }
 #define SHA3_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, SHA3, OPS, QUALS, FLAGS, 0, 0, NULL }
 #define SM4_INSN(NAME,OPCODE,MASK,CLASS,OPS,QUALS,FLAGS) \
   { NAME, OPCODE, MASK, CLASS, 0, SM4, OPS, QUALS, FLAGS, 0, 0, NULL }
@@ -4805,8 +4793,8 @@ struct aarch64_opcode aarch64_opcode_table[] =
   SVE2_INSNC ("shsub", 0x44128000, 0xff3fe000, sve_size_bhsd, 0, OP4 (SVE_Zd, SVE_Pg3, SVE_Zd, SVE_Zn), OP_SVE_VMVV_BHSD, 0, C_SCAN_MOVPRFX, 2),
   SVE2_INSNC ("shsubr", 0x44168000, 0xff3fe000, sve_size_bhsd, 0, OP4 (SVE_Zd, SVE_Pg3, SVE_Zd, SVE_Zn), OP_SVE_VMVV_BHSD, 0, C_SCAN_MOVPRFX, 2),
   SVE2_INSN ("sli", 0x4500f400, 0xff20fc00, sve_shift_tsz_bhsd, 0, OP3 (SVE_Zd, SVE_Zn, SVE_SHLIMM_UNPRED), OP_SVE_VVU_BHSD, 0, 0),
-  SVE2_INSNC ("smaxp", 0x4414a000, 0xff3fe000, sve_size_bhsd, 0, OP4 (SVE_Zd, SVE_Pg3, SVE_Zd, SVE_Zn), OP_SVE_VMVV_BHSD, 0, C_SCAN_MOVPRFX, 0),
-  SVE2_INSNC ("sminp", 0x4416a000, 0xff3fe000, sve_size_bhsd, 0, OP4 (SVE_Zd, SVE_Pg3, SVE_Zd, SVE_Zn), OP_SVE_VMVV_BHSD, 0, C_SCAN_MOVPRFX, 0),
+  SVE2_INSNC ("smaxp", 0x4414a000, 0xff3fe000, sve_size_bhsd, 0, OP4 (SVE_Zd, SVE_Pg3, SVE_Zd, SVE_Zn), OP_SVE_VMVV_BHSD, 0, C_SCAN_MOVPRFX, 2),
+  SVE2_INSNC ("sminp", 0x4416a000, 0xff3fe000, sve_size_bhsd, 0, OP4 (SVE_Zd, SVE_Pg3, SVE_Zd, SVE_Zn), OP_SVE_VMVV_BHSD, 0, C_SCAN_MOVPRFX, 2),
   SVE2_INSNC ("smlalb", 0x44a08000, 0xffe0f400, sve_misc, 0, OP3 (SVE_Zd, SVE_Zn, SVE_Zm3_11_INDEX), OP_SVE_SHH, 0, C_SCAN_MOVPRFX, 0),
   SVE2_INSNC ("smlalb", 0x44e08000, 0xffe0f400, sve_misc, 0, OP3 (SVE_Zd, SVE_Zn, SVE_Zm4_11_INDEX), OP_SVE_DSS, 0, C_SCAN_MOVPRFX, 0),
   SVE2_INSNC ("smlalb", 0x44004000, 0xff20fc00, sve_size_hsd, 0, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_VVV_HSD_BHS, 0, C_SCAN_MOVPRFX, 0),
@@ -5085,28 +5073,28 @@ struct aarch64_opcode aarch64_opcode_table[] =
   INT8MATMUL_SVE_INSNC ("usdot", 0x44a01800, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm3_INDEX), OP_SVE_SBB, 0, C_SCAN_MOVPRFX, 0),
   INT8MATMUL_SVE_INSNC ("sudot", 0x44a01c00, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm3_INDEX), OP_SVE_SBB, 0, C_SCAN_MOVPRFX, 0),
   F32MATMUL_SVE_INSNC ("fmmla", 0x64a0e400, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_VVV_S, 0, C_SCAN_MOVPRFX, 0),
-  F64MATMUL_SVE_INSNC ("fmmla", 0x64c0e400, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_VVV_D, 0, C_SCAN_MOVPRFX, 0),
-  F64MATMUL_SVE_INSN ("ld1rob", 0xa4200000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX), OP_SVE_BZU, F_OD(1), 0),
-  F64MATMUL_SVE_INSN ("ld1roh", 0xa4a00000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX), OP_SVE_HZU, F_OD(1), 0),
-  F64MATMUL_SVE_INSN ("ld1row", 0xa5200000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX), OP_SVE_SZU, F_OD(1), 0),
-  F64MATMUL_SVE_INSN ("ld1rod", 0xa5a00000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX), OP_SVE_DZU, F_OD(1), 0),
+  F64MATMUL_SVE_INSNC ("fmmla", 0x64e0e400, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_VVV_D, 0, C_SCAN_MOVPRFX, 0),
+  F64MATMUL_SVE_INSN ("ld1rob", 0xa4200000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX), OP_SVE_BZU, F_OD(1), 0),
+  F64MATMUL_SVE_INSN ("ld1roh", 0xa4a00000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX_LSL1), OP_SVE_HZU, F_OD(1), 0),
+  F64MATMUL_SVE_INSN ("ld1row", 0xa5200000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX_LSL2), OP_SVE_SZU, F_OD(1), 0),
+  F64MATMUL_SVE_INSN ("ld1rod", 0xa5a00000, 0xffe0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RX_LSL3), OP_SVE_DZU, F_OD(1), 0),
   F64MATMUL_SVE_INSN ("ld1rob", 0xa4202000, 0xfff0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RI_S4x32), OP_SVE_BZU, F_OD(1), 0),
   F64MATMUL_SVE_INSN ("ld1roh", 0xa4a02000, 0xfff0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RI_S4x32), OP_SVE_HZU, F_OD(1), 0),
   F64MATMUL_SVE_INSN ("ld1row", 0xa5202000, 0xfff0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RI_S4x32), OP_SVE_SZU, F_OD(1), 0),
   F64MATMUL_SVE_INSN ("ld1rod", 0xa5a02000, 0xfff0e000, sve_misc, OP3 (SVE_ZtxN, SVE_Pg3, SVE_ADDR_RI_S4x32), OP_SVE_DZU, F_OD(1), 0),
   F64MATMUL_SVE_INSN ("zip1", 0x05a00000, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
   F64MATMUL_SVE_INSN ("zip2", 0x05a00400, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
-  F64MATMUL_SVE_INSN ("uzip1", 0x05a00800, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
-  F64MATMUL_SVE_INSN ("uzip2", 0x05a00c00, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
+  F64MATMUL_SVE_INSN ("uzp1", 0x05a00800, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
+  F64MATMUL_SVE_INSN ("uzp2", 0x05a00c00, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
   F64MATMUL_SVE_INSN ("trn1", 0x05a01800, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
   F64MATMUL_SVE_INSN ("trn2", 0x05a01c00, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_QQQ, 0, 0),
 
   /* Matrix Multiply advanced SIMD instructions. */
   INT8MATMUL_INSN ("smmla", 0x4e80a400, 0xffe0fc00, aarch64_misc, OP3 (Vd, Vn, Vm), QL_MMLA64, 0),
   INT8MATMUL_INSN ("ummla", 0x6e80a400, 0xffe0fc00, aarch64_misc, OP3 (Vd, Vn, Vm), QL_MMLA64, 0),
   INT8MATMUL_INSN ("usmmla", 0x4e80ac00, 0xffe0fc00, aarch64_misc, OP3 (Vd, Vn, Vm), QL_MMLA64, 0),
-  INT8MATMUL_INSN ("usdot", 0x4e809c00, 0xffe0fc00, aarch64_misc, OP3 (Vd, Vn, Vm), QL_V3DOT, F_SIZEQ),
-  INT8MATMUL_INSN ("usdot", 0x4f80f000, 0xffc0f400, dotproduct, OP3 (Vd, Vn, Em), QL_V2DOT, F_SIZEQ),
-  INT8MATMUL_INSN ("sudot", 0x4f00f000, 0xffc0f400, dotproduct, OP3 (Vd, Vn, Em), QL_V2DOT, F_SIZEQ),
+  INT8MATMUL_INSN ("usdot", 0x0e809c00, 0xbfe0fc00, aarch64_misc, OP3 (Vd, Vn, Vm), QL_V3DOT, F_SIZEQ),
+  INT8MATMUL_INSN ("usdot", 0x0f80f000, 0xbfc0f400, dotproduct, OP3 (Vd, Vn, Em), QL_V2DOT, F_SIZEQ),
+  INT8MATMUL_INSN ("sudot", 0x0f00f000, 0xbfc0f400, dotproduct, OP3 (Vd, Vn, Em), QL_V2DOT, F_SIZEQ),
 
   /* BFloat instructions. */
   BFLOAT16_SVE_INSNC ("bfdot", 0x64608000, 0xffe0fc00, sve_misc, OP3 (SVE_Zd, SVE_Zn, SVE_Zm_16), OP_SVE_SHH, 0, C_SCAN_MOVPRFX, 0),
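
For readers who do not work on opcodes/aarch64-tbl.h regularly: every hunk above edits rows of aarch64_opcode_table[], and each *_INSN macro expands to a brace initializer whose sixth field is the pointer to the feature set the instruction requires (SIMD, CRC, AES, ARMV8_4, ...), which is why dropping the aarch64_feature_crypto set also drops the CRYPTO/CRYPTO_V8_2 defines and their CRYP_INSN/CRYPTO_V8_2_INSN wrappers. The stand-alone C sketch below only mimics that table/macro pattern; it is illustrative and not part of the patch. The struct, the DEMO_* names, and the feature string are simplified stand-ins (the real struct aarch64_opcode lives in include/opcode/aarch64.h), and only the "smmla" opcode and mask values are taken from the hunk above.

/* Illustrative sketch of the opcode-table pattern used in aarch64-tbl.h.
   Simplified stand-in types and names; not the real libopcodes API.  */
#include <stdio.h>

struct demo_opcode
{
  const char *name;      /* mnemonic                                 */
  unsigned int opcode;   /* base encoding                            */
  unsigned int mask;     /* bits that must match to decode this row  */
  const char *feature;   /* stand-in for the aarch64_feature_set *   */
};

/* Mirrors the shape of INT8MATMUL_INSN: the macro pins the feature
   column so each row only spells out the per-instruction fields.  */
#define DEMO_INT8MATMUL_INSN(NAME, OPCODE, MASK) \
  { NAME, OPCODE, MASK, "i8mm" }

static const struct demo_opcode demo_table[] =
{
  /* Opcode and mask values copied from the smmla row in the diff.  */
  DEMO_INT8MATMUL_INSN ("smmla", 0x4e80a400, 0xffe0fc00),
};

int
main (void)
{
  printf ("%s: opcode 0x%08x, mask 0x%08x, feature %s\n",
          demo_table[0].name, demo_table[0].opcode,
          demo_table[0].mask, demo_table[0].feature);
  return 0;
}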