Update year range in copyright notice of all files.
[deliverable/binutils-gdb.git] / include/opcode/aarch64.h
index b0eb6175d079609c7a203491651386340a0bd675..d3d86ca8f8e8f2cf9710ec9063def2f3fc914133 100644
@@ -1,6 +1,6 @@
 /* AArch64 assembler/disassembler support.
 
-   Copyright (C) 2009-2016 Free Software Foundation, Inc.
+   Copyright (C) 2009-2017 Free Software Foundation, Inc.
    Contributed by ARM Ltd.
 
    This file is part of GNU Binutils.
@@ -39,6 +39,7 @@ typedef uint32_t aarch64_insn;
 /* The following bitmasks control CPU features.  */
 #define AARCH64_FEATURE_V8     0x00000001      /* All processors.  */
 #define AARCH64_FEATURE_V8_2   0x00000020      /* ARMv8.2 processors.  */
+#define AARCH64_FEATURE_V8_3   0x00000040      /* ARMv8.3 processors.  */
 #define AARCH64_FEATURE_CRYPTO 0x00010000      /* Crypto instructions.  */
 #define AARCH64_FEATURE_FP     0x00020000      /* FP instructions.  */
 #define AARCH64_FEATURE_SIMD   0x00040000      /* SIMD instructions.  */
@@ -51,32 +52,25 @@ typedef uint32_t aarch64_insn;
 #define AARCH64_FEATURE_F16    0x02000000      /* v8.2 FP16 instructions.  */
 #define AARCH64_FEATURE_RAS    0x04000000      /* RAS Extensions.  */
 #define AARCH64_FEATURE_PROFILE        0x08000000      /* Statistical Profiling.  */
+#define AARCH64_FEATURE_SVE    0x10000000      /* SVE instructions.  */
 
 /* Architectures are the sum of the base and extensions.  */
 #define AARCH64_ARCH_V8                AARCH64_FEATURE (AARCH64_FEATURE_V8, \
                                                 AARCH64_FEATURE_FP  \
                                                 | AARCH64_FEATURE_SIMD)
-#define AARCH64_ARCH_V8_1      AARCH64_FEATURE (AARCH64_FEATURE_V8, \
-                                                AARCH64_FEATURE_FP  \
-                                                | AARCH64_FEATURE_SIMD \
-                                                | AARCH64_FEATURE_CRC  \
+#define AARCH64_ARCH_V8_1      AARCH64_FEATURE (AARCH64_ARCH_V8, \
+                                                AARCH64_FEATURE_CRC    \
                                                 | AARCH64_FEATURE_V8_1 \
                                                 | AARCH64_FEATURE_LSE  \
                                                 | AARCH64_FEATURE_PAN  \
                                                 | AARCH64_FEATURE_LOR  \
                                                 | AARCH64_FEATURE_RDMA)
-#define AARCH64_ARCH_V8_2      AARCH64_FEATURE (AARCH64_FEATURE_V8,    \
+#define AARCH64_ARCH_V8_2      AARCH64_FEATURE (AARCH64_ARCH_V8_1,     \
                                                 AARCH64_FEATURE_V8_2   \
                                                 | AARCH64_FEATURE_F16  \
-                                                | AARCH64_FEATURE_RAS  \
-                                                | AARCH64_FEATURE_FP   \
-                                                | AARCH64_FEATURE_SIMD \
-                                                | AARCH64_FEATURE_CRC  \
-                                                | AARCH64_FEATURE_V8_1 \
-                                                | AARCH64_FEATURE_LSE  \
-                                                | AARCH64_FEATURE_PAN  \
-                                                | AARCH64_FEATURE_LOR  \
-                                                | AARCH64_FEATURE_RDMA)
+                                                | AARCH64_FEATURE_RAS)
+#define AARCH64_ARCH_V8_3      AARCH64_FEATURE (AARCH64_ARCH_V8_2,     \
+                                                AARCH64_FEATURE_V8_3)
 
 #define AARCH64_ARCH_NONE      AARCH64_FEATURE (0, 0)
 #define AARCH64_ANY            AARCH64_FEATURE (-1, 0) /* Any basic core.  */
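
The reworked macros above let each architecture level build on its predecessor instead of repeating the full feature list: AARCH64_ARCH_V8_1 now starts from AARCH64_ARCH_V8, and the v8.2/v8.3 definitions follow the same pattern. A minimal standalone sketch of the idea, assuming AARCH64_FEATURE simply ORs a base set with extension bits (its definition is not part of this hunk) and using a hypothetical has_feature helper:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustration only; the values are copied from the bitmasks above.  */
    #define FEAT_V8     0x00000001u      /* All processors.  */
    #define FEAT_V8_3   0x00000040u      /* ARMv8.3 processors.  */
    #define FEAT_SVE    0x10000000u      /* SVE instructions.  */

    /* Assumed to behave like AARCH64_FEATURE: OR base and extensions.  */
    #define FEATURE(base, ext)  ((base) | (ext))

    #define ARCH_V8     FEATURE (FEAT_V8, 0)
    #define ARCH_V8_3   FEATURE (ARCH_V8, FEAT_V8_3)   /* layered, as above */

    /* Hypothetical helper, not part of the binutils API.  */
    static int has_feature (uint32_t arch, uint32_t feat)
    {
      return (arch & feat) == feat;
    }

    int main (void)
    {
      printf ("v8.3 in ARCH_V8_3: %d\n", has_feature (ARCH_V8_3, FEAT_V8_3));
      printf ("v8.3 in ARCH_V8:   %d\n", has_feature (ARCH_V8, FEAT_V8_3));
      return 0;
    }

The layered form also means a new level such as AARCH64_ARCH_V8_3 only has to name what it adds on top of v8.2.
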
@@ -119,7 +113,6 @@ enum aarch64_operand_class
   AARCH64_OPND_CLASS_SIMD_ELEMENT,
   AARCH64_OPND_CLASS_SISD_REG,
   AARCH64_OPND_CLASS_SIMD_REGLIST,
-  AARCH64_OPND_CLASS_CP_REG,
   AARCH64_OPND_CLASS_SVE_REG,
   AARCH64_OPND_CLASS_PRED_REG,
   AARCH64_OPND_CLASS_ADDRESS,
@@ -146,6 +139,7 @@ enum aarch64_opnd
 
   AARCH64_OPND_Rd_SP,  /* Integer Rd or SP.  */
   AARCH64_OPND_Rn_SP,  /* Integer Rn or SP.  */
+  AARCH64_OPND_Rm_SP,  /* Integer Rm or SP.  */
   AARCH64_OPND_PAIRREG,        /* Paired register operand.  */
   AARCH64_OPND_Rm_EXT, /* Integer Rm extended.  */
   AARCH64_OPND_Rm_SFT, /* Integer Rm shifted.  */
@@ -175,8 +169,8 @@ enum aarch64_opnd
                           structure to all lanes.  */
   AARCH64_OPND_LEt,    /* AdvSIMD Vector Element list.  */
 
-  AARCH64_OPND_Cn,     /* Co-processor register in CRn field.  */
-  AARCH64_OPND_Cm,     /* Co-processor register in CRm field.  */
+  AARCH64_OPND_CRn,    /* Co-processor register in CRn field.  */
+  AARCH64_OPND_CRm,    /* Co-processor register in CRm field.  */
 
   AARCH64_OPND_IDX,    /* AdvSIMD EXT index operand.  */
   AARCH64_OPND_IMM_VLSL,/* Immediate for shifting vector registers left.  */
@@ -200,6 +194,7 @@ enum aarch64_opnd
   AARCH64_OPND_BIT_NUM,        /* Immediate.  */
   AARCH64_OPND_EXCEPTION,/* imm16 operand in exception instructions.  */
   AARCH64_OPND_CCMP_IMM,/* Immediate in conditional compare instructions.  */
+  AARCH64_OPND_SIMM5,  /* 5-bit signed immediate in the imm5 field.  */
   AARCH64_OPND_NZCV,   /* Flag bit specifier giving an alternative value for
                           each condition flag.  */
 
@@ -208,6 +203,9 @@ enum aarch64_opnd
   AARCH64_OPND_HALF,   /* #<imm16>{, LSL #<shift>} operand in move wide.  */
   AARCH64_OPND_FBITS,  /* FP #<fbits> operand in e.g. SCVTF */
   AARCH64_OPND_IMM_MOV,        /* Immediate operand for the MOV alias.  */
+  AARCH64_OPND_IMM_ROT1,       /* Immediate rotate operand for FCMLA.  */
+  AARCH64_OPND_IMM_ROT2,       /* Immediate rotate operand for indexed FCMLA.  */
+  AARCH64_OPND_IMM_ROT3,       /* Immediate rotate operand for FCADD.  */
 
   AARCH64_OPND_COND,   /* Standard condition as the last operand.  */
   AARCH64_OPND_COND1,  /* Same as the above, but excluding AL and NV.  */
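
The three rotate operands added above (IMM_ROT1/2/3) carry the rotation argument of the ARMv8.3 complex-arithmetic instructions. A hedged sketch of what they imply, assuming the usual A64 encoding in which FCMLA takes 0, 90, 180 or 270 degrees in a two-bit field and FCADD takes 90 or 270 in a single bit (the real encode/decode routines live in the opcodes library, not in this header):

    #include <stdio.h>

    /* Sketch only: map assumed rot field values to degrees.  */
    static int fcmla_rot_degrees (unsigned int rot2)   /* two-bit field */
    {
      return (int) (rot2 & 3) * 90;                    /* 0, 90, 180, 270 */
    }

    static int fcadd_rot_degrees (unsigned int rot1)   /* one-bit field */
    {
      return (rot1 & 1) ? 270 : 90;                    /* 90 or 270 only */
    }

    int main (void)
    {
      printf ("FCMLA rot=2 -> %d\n", fcmla_rot_degrees (2));
      printf ("FCADD rot=1 -> %d\n", fcadd_rot_degrees (1));
      return 0;
    }
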
@@ -229,6 +227,7 @@ enum aarch64_opnd
                                   friendly feature of using LDR/STR as the
                                  mnemonic name for LDUR/STUR instructions
                                   wherever there is no ambiguity.  */
+  AARCH64_OPND_ADDR_SIMM10,    /* Address of signed 10-bit immediate.  */
   AARCH64_OPND_ADDR_UIMM12,    /* Address of unsigned 12-bit immediate.  */
   AARCH64_OPND_SIMD_ADDR_SIMPLE,/* Address of ld/st multiple structures.  */
   AARCH64_OPND_SIMD_ADDR_POST, /* Address of ld/st multiple post-indexed.  */
@@ -244,6 +243,63 @@ enum aarch64_opnd
   AARCH64_OPND_PRFOP,          /* Prefetch operation.  */
   AARCH64_OPND_BARRIER_PSB,    /* Barrier operand for PSB.  */
 
+  AARCH64_OPND_SVE_ADDR_RI_S4xVL,   /* SVE [<Xn|SP>, #<simm4>, MUL VL].  */
+  AARCH64_OPND_SVE_ADDR_RI_S4x2xVL, /* SVE [<Xn|SP>, #<simm4>*2, MUL VL].  */
+  AARCH64_OPND_SVE_ADDR_RI_S4x3xVL, /* SVE [<Xn|SP>, #<simm4>*3, MUL VL].  */
+  AARCH64_OPND_SVE_ADDR_RI_S4x4xVL, /* SVE [<Xn|SP>, #<simm4>*4, MUL VL].  */
+  AARCH64_OPND_SVE_ADDR_RI_S6xVL,   /* SVE [<Xn|SP>, #<simm6>, MUL VL].  */
+  AARCH64_OPND_SVE_ADDR_RI_S9xVL,   /* SVE [<Xn|SP>, #<simm9>, MUL VL].  */
+  AARCH64_OPND_SVE_ADDR_RI_U6,     /* SVE [<Xn|SP>, #<uimm6>].  */
+  AARCH64_OPND_SVE_ADDR_RI_U6x2,    /* SVE [<Xn|SP>, #<uimm6>*2].  */
+  AARCH64_OPND_SVE_ADDR_RI_U6x4,    /* SVE [<Xn|SP>, #<uimm6>*4].  */
+  AARCH64_OPND_SVE_ADDR_RI_U6x8,    /* SVE [<Xn|SP>, #<uimm6>*8].  */
+  AARCH64_OPND_SVE_ADDR_RR,        /* SVE [<Xn|SP>, <Xm|XZR>].  */
+  AARCH64_OPND_SVE_ADDR_RR_LSL1,    /* SVE [<Xn|SP>, <Xm|XZR>, LSL #1].  */
+  AARCH64_OPND_SVE_ADDR_RR_LSL2,    /* SVE [<Xn|SP>, <Xm|XZR>, LSL #2].  */
+  AARCH64_OPND_SVE_ADDR_RR_LSL3,    /* SVE [<Xn|SP>, <Xm|XZR>, LSL #3].  */
+  AARCH64_OPND_SVE_ADDR_RX,        /* SVE [<Xn|SP>, <Xm>].  */
+  AARCH64_OPND_SVE_ADDR_RX_LSL1,    /* SVE [<Xn|SP>, <Xm>, LSL #1].  */
+  AARCH64_OPND_SVE_ADDR_RX_LSL2,    /* SVE [<Xn|SP>, <Xm>, LSL #2].  */
+  AARCH64_OPND_SVE_ADDR_RX_LSL3,    /* SVE [<Xn|SP>, <Xm>, LSL #3].  */
+  AARCH64_OPND_SVE_ADDR_RZ,        /* SVE [<Xn|SP>, Zm.D].  */
+  AARCH64_OPND_SVE_ADDR_RZ_LSL1,    /* SVE [<Xn|SP>, Zm.D, LSL #1].  */
+  AARCH64_OPND_SVE_ADDR_RZ_LSL2,    /* SVE [<Xn|SP>, Zm.D, LSL #2].  */
+  AARCH64_OPND_SVE_ADDR_RZ_LSL3,    /* SVE [<Xn|SP>, Zm.D, LSL #3].  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW_14,  /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW].
+                                      Bit 14 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW_22,  /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW].
+                                      Bit 22 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW1_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #1].
+                                      Bit 14 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW1_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #1].
+                                      Bit 22 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW2_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #2].
+                                      Bit 14 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW2_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #2].
+                                      Bit 22 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW3_14, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #3].
+                                      Bit 14 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_RZ_XTW3_22, /* SVE [<Xn|SP>, Zm.<T>, (S|U)XTW #3].
+                                      Bit 22 controls S/U choice.  */
+  AARCH64_OPND_SVE_ADDR_ZI_U5,     /* SVE [Zn.<T>, #<uimm5>].  */
+  AARCH64_OPND_SVE_ADDR_ZI_U5x2,    /* SVE [Zn.<T>, #<uimm5>*2].  */
+  AARCH64_OPND_SVE_ADDR_ZI_U5x4,    /* SVE [Zn.<T>, #<uimm5>*4].  */
+  AARCH64_OPND_SVE_ADDR_ZI_U5x8,    /* SVE [Zn.<T>, #<uimm5>*8].  */
+  AARCH64_OPND_SVE_ADDR_ZZ_LSL,     /* SVE [Zn.<T>, Zm.<T>, LSL #<msz>].  */
+  AARCH64_OPND_SVE_ADDR_ZZ_SXTW,    /* SVE [Zn.<T>, Zm.<T>, SXTW #<msz>].  */
+  AARCH64_OPND_SVE_ADDR_ZZ_UXTW,    /* SVE [Zn.<T>, Zm.<T>, UXTW #<msz>].  */
+  AARCH64_OPND_SVE_AIMM,       /* SVE unsigned arithmetic immediate.  */
+  AARCH64_OPND_SVE_ASIMM,      /* SVE signed arithmetic immediate.  */
+  AARCH64_OPND_SVE_FPIMM8,     /* SVE 8-bit floating-point immediate.  */
+  AARCH64_OPND_SVE_I1_HALF_ONE,        /* SVE choice between 0.5 and 1.0.  */
+  AARCH64_OPND_SVE_I1_HALF_TWO,        /* SVE choice between 0.5 and 2.0.  */
+  AARCH64_OPND_SVE_I1_ZERO_ONE,        /* SVE choice between 0.0 and 1.0.  */
+  AARCH64_OPND_SVE_INV_LIMM,   /* SVE inverted logical immediate.  */
+  AARCH64_OPND_SVE_LIMM,       /* SVE logical immediate.  */
+  AARCH64_OPND_SVE_LIMM_MOV,   /* SVE logical immediate for MOV.  */
+  AARCH64_OPND_SVE_PATTERN,    /* SVE vector pattern enumeration.  */
+  AARCH64_OPND_SVE_PATTERN_SCALED, /* Likewise, with additional MUL factor.  */
+  AARCH64_OPND_SVE_PRFOP,      /* SVE prefetch operation.  */
   AARCH64_OPND_SVE_Pd,         /* SVE p0-p15 in Pd.  */
   AARCH64_OPND_SVE_Pg3,                /* SVE p0-p7 in Pg.  */
   AARCH64_OPND_SVE_Pg4_5,      /* SVE p0-p15 in Pg, bits [8,5].  */
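
Many of the new SVE address operands above pair a base register with a short signed immediate that is implicitly scaled, either by a vector-length multiple (the MUL VL forms) or by the transfer size. A small self-contained sketch of the arithmetic such operands imply; the field extraction and the 32-byte vector length are invented for illustration, and the real inserters/extractors live in the opcodes library:

    #include <stdint.h>
    #include <stdio.h>

    /* Sign-extend a BITS-wide field, e.g. the simm4 used by the
       SVE_ADDR_RI_S4xVL family.  */
    static int64_t sign_extend (uint64_t value, unsigned int bits)
    {
      uint64_t sign = 1ull << (bits - 1);
      return (int64_t) ((value ^ sign) - sign);
    }

    /* Byte offset of "[<Xn|SP>, #<simm4>*<mul>, MUL VL]" for a given
       vector length in bytes (hypothetical helper).  */
    static int64_t vl_offset_bytes (uint64_t simm4, int mul, int vl_bytes)
    {
      return sign_extend (simm4, 4) * mul * vl_bytes;
    }

    int main (void)
    {
      /* #-1, MUL VL with an assumed 256-bit (32-byte) vector length.  */
      printf ("%lld\n", (long long) vl_offset_bytes (0xf, 1, 32));   /* -32 */
      return 0;
    }
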
@@ -252,6 +308,24 @@ enum aarch64_opnd
   AARCH64_OPND_SVE_Pm,         /* SVE p0-p15 in Pm.  */
   AARCH64_OPND_SVE_Pn,         /* SVE p0-p15 in Pn.  */
   AARCH64_OPND_SVE_Pt,         /* SVE p0-p15 in Pt.  */
+  AARCH64_OPND_SVE_Rm,         /* Integer Rm or ZR, alt. SVE position.  */
+  AARCH64_OPND_SVE_Rn_SP,      /* Integer Rn or SP, alt. SVE position.  */
+  AARCH64_OPND_SVE_SHLIMM_PRED,          /* SVE shift left amount (predicated).  */
+  AARCH64_OPND_SVE_SHLIMM_UNPRED, /* SVE shift left amount (unpredicated).  */
+  AARCH64_OPND_SVE_SHRIMM_PRED,          /* SVE shift right amount (predicated).  */
+  AARCH64_OPND_SVE_SHRIMM_UNPRED, /* SVE shift right amount (unpredicated).  */
+  AARCH64_OPND_SVE_SIMM5,      /* SVE signed 5-bit immediate.  */
+  AARCH64_OPND_SVE_SIMM5B,     /* SVE secondary signed 5-bit immediate.  */
+  AARCH64_OPND_SVE_SIMM6,      /* SVE signed 6-bit immediate.  */
+  AARCH64_OPND_SVE_SIMM8,      /* SVE signed 8-bit immediate.  */
+  AARCH64_OPND_SVE_UIMM3,      /* SVE unsigned 3-bit immediate.  */
+  AARCH64_OPND_SVE_UIMM7,      /* SVE unsigned 7-bit immediate.  */
+  AARCH64_OPND_SVE_UIMM8,      /* SVE unsigned 8-bit immediate.  */
+  AARCH64_OPND_SVE_UIMM8_53,   /* SVE split unsigned 8-bit immediate.  */
+  AARCH64_OPND_SVE_VZn,                /* Scalar SIMD&FP register in Zn field.  */
+  AARCH64_OPND_SVE_Vd,         /* Scalar SIMD&FP register in Vd.  */
+  AARCH64_OPND_SVE_Vm,         /* Scalar SIMD&FP register in Vm.  */
+  AARCH64_OPND_SVE_Vn,         /* Scalar SIMD&FP register in Vn.  */
   AARCH64_OPND_SVE_Za_5,       /* SVE vector register in Za, bits [9,5].  */
   AARCH64_OPND_SVE_Za_16,      /* SVE vector register in Za, bits [20,16].  */
   AARCH64_OPND_SVE_Zd,         /* SVE vector register in Zd.  */
@@ -315,7 +389,11 @@ enum aarch64_opnd_qualifier
   AARCH64_OPND_QLF_V_2D,
   AARCH64_OPND_QLF_V_1Q,
 
+  AARCH64_OPND_QLF_P_Z,
+  AARCH64_OPND_QLF_P_M,
+
   /* Constraint on value.  */
+  AARCH64_OPND_QLF_CR,         /* CRn, CRm. */
   AARCH64_OPND_QLF_imm_0_7,
   AARCH64_OPND_QLF_imm_0_15,
   AARCH64_OPND_QLF_imm_0_31,
@@ -391,6 +469,7 @@ enum aarch64_insn_class
   ldst_immpost,
   ldst_immpre,
   ldst_imm9,   /* immpost or immpre */
+  ldst_imm10,  /* LDRAA/LDRAB */
   ldst_pos,
   ldst_regoff,
   ldst_unpriv,
@@ -406,6 +485,18 @@ enum aarch64_insn_class
   movewide,
   pcreladdr,
   ic_system,
+  sve_cpy,
+  sve_index,
+  sve_limm,
+  sve_misc,
+  sve_movprfx,
+  sve_pred_zm,
+  sve_shift_pred,
+  sve_shift_unpred,
+  sve_size_bhs,
+  sve_size_bhsd,
+  sve_size_hsd,
+  sve_size_sd,
   testbranch,
 };
 
@@ -496,6 +587,20 @@ enum aarch64_op
   OP_UXTL,
   OP_UXTL2,
 
+  OP_MOV_P_P,
+  OP_MOV_Z_P_Z,
+  OP_MOV_Z_V,
+  OP_MOV_Z_Z,
+  OP_MOV_Z_Zi,
+  OP_MOVM_P_P_P,
+  OP_MOVS_P_P,
+  OP_MOVZS_P_P_P,
+  OP_MOVZ_P_P_P,
+  OP_NOTS_P_P_P_Z,
+  OP_NOT_P_P_P_Z,
+
+  OP_FCMLA_ELEM,       /* ARMv8.3, indexed element version.  */
+
   OP_TOTAL_NUM,                /* Pseudo.  */
 };
 
@@ -740,6 +845,8 @@ enum aarch64_modifier_kind
   AARCH64_MOD_SXTH,
   AARCH64_MOD_SXTW,
   AARCH64_MOD_SXTX,
+  AARCH64_MOD_MUL,
+  AARCH64_MOD_MUL_VL,
 };
 
 bfd_boolean
@@ -753,7 +860,7 @@ typedef struct
 {
   /* A list of names with the first one as the disassembly preference;
      terminated by NULL if fewer than 3.  */
-  const char *names[3];
+  const char *names[4];
   aarch64_insn value;
 } aarch64_cond;
 
@@ -831,10 +938,10 @@ struct aarch64_opnd_info
   struct
     {
       enum aarch64_modifier_kind kind;
-      int amount;
       unsigned operator_present: 1;    /* Only valid during encoding.  */
       /* Value of the 'S' field in ld/st reg offset; used only in decoding.  */
       unsigned amount_present: 1;
+      int64_t amount;
     } shifter;
 
   unsigned skip:1;     /* Operand is not completed if there is a fixup needed
@@ -1012,6 +1119,9 @@ aarch64_get_operand_name (enum aarch64_opnd);
 extern const char *
 aarch64_get_operand_desc (enum aarch64_opnd);
 
+extern bfd_boolean
+aarch64_sve_dupm_mov_immediate_p (uint64_t, int);
+
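
aarch64_sve_dupm_mov_immediate_p is only declared here; judging from its name, it presumably reports whether a DUPM bitmask immediate would be printed through the MOV alias. A hedged usage sketch that links against libopcodes/libbfd; treating the two arguments as the immediate value and the element size in bytes is an assumption taken from the name, not something this header documents:

    /* bfd.h refuses inclusion outside a configured build tree unless
       PACKAGE is defined, hence the stub definitions.  */
    #define PACKAGE "sketch"
    #define PACKAGE_VERSION "0"
    #include <stdint.h>
    #include <stdio.h>
    #include "bfd.h"
    #include "opcode/aarch64.h"

    int main (void)
    {
      uint64_t imm = 0x00ff00ff00ff00ffull;   /* repeating bitmask pattern */
      int esize = 2;                          /* assumed: element size in bytes */

      if (aarch64_sve_dupm_mov_immediate_p (imm, esize))
        printf ("disassembler would prefer the MOV alias\n");
      else
        printf ("disassembler would keep DUPM\n");
      return 0;
    }
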
 #ifdef DEBUG_AARCH64
 extern int debug_dump;
 
@@ -1034,6 +1144,9 @@ aarch64_verbose (const char *, ...) __attribute__ ((format (printf, 1, 2)));
 #define DEBUG_TRACE_IF(C, M, ...) ;
 #endif /* DEBUG_AARCH64 */
 
+extern const char *const aarch64_sve_pattern_array[32];
+extern const char *const aarch64_sve_prfop_array[16];
+
 #ifdef __cplusplus
 }
 #endif
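
The two exported tables added in the last hunk hold the textual names of SVE predicate patterns and prefetch operations. A hedged lookup sketch (link against the opcodes library); the assumption that unallocated encodings are NULL entries is taken from how comparable binutils name tables are laid out, not from this header:

    #include <stdio.h>

    /* Declared in opcode/aarch64.h; defined in the opcodes library.  */
    extern const char *const aarch64_sve_pattern_array[32];

    /* Hypothetical helper: printable name for a 5-bit pattern field.  */
    static const char *
    sve_pattern_name (unsigned int pattern)
    {
      const char *name = aarch64_sve_pattern_array[pattern & 31];
      return name != NULL ? name : "(unallocated)";
    }

    int main (void)
    {
      printf ("pattern 31 -> %s\n", sve_pattern_name (31));
      return 0;
    }
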