Instruction Reference
abs Dn, Dm
abs Vn.B16, Vm.B16
abs Vn.B8, Vm.B8
abs Vn.H8, Vm.H8
abs Vn.H4, Vm.H4
abs Vn.S4, Vm.S4
abs Vn.S2, Vm.S2
abs Vn.D2, Vm.D2
adc Wn, Wm, Wa
adc Xn, Xm, Xa
adcs Wn, Wm, Wa
adcs Xn, Xm, Xa
add Wn, Wm, Wa {, LSL|LSR|ASR #uimm } (#uimm < 32)
add Xn, Xm, Xa {, LSL|LSR|ASR #uimm } (#uimm < 64)
add Wn|WSP, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm } (0 <= #uimm <= 4)
add Xn|SP, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } } (0 <= #uimm <= 4)
add Xn|SP, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm } (0 <= #uimm <= 4)
add Wn|WSP, Wm|WSP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
add Xn|SP, Xm|SP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
add Dn, Dm, Da
add Vn.B16, Vm.B16, Va.B16
add Vn.B8, Vm.B8, Va.B8
add Vn.H8, Vm.H8, Va.H8
add Vn.H4, Vm.H4, Va.H4
add Vn.S4, Vm.S4, Va.S4
add Vn.S2, Vm.S2, Va.S2
add Vn.D2, Vm.D2, Va.D2
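
Throughout this reference, braces { } mark optional operands and the trailing parentheses bound the immediates. As an illustration, a few concrete instantiations of the add forms above, with registers and immediates chosen arbitrarily (standard A64 syntax):

    add w0, w1, w2, LSL #4    // shifted-register: w0 = w1 + (w2 << 4)
    add x0, sp, x1            // extended form; LSL #0 implied
    add x0, x1, w2, SXTW #2   // x0 = x1 + (sign-extend(w2) << 2)
    add x0, x1, #42, LSL #12  // immediate: x0 = x1 + (42 << 12)
    add v0.4s, v1.4s, v2.4s   // vector: lane-wise 32-bit add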
addhn Vn.B8, Vm.H8, Va.H8
addhn Vn.H4, Vm.S4, Va.S4
addhn Vn.S2, Vm.D2, Va.D2
addhn2 Vn.B16, Vm.H8, Va.H8
addhn2 Vn.H8, Vm.S4, Va.S4
addhn2 Vn.S4, Vm.D2, Va.D2
addp Dn, Vm.D2
addp Vn.B16, Vm.B16, Va.B16
addp Vn.B8, Vm.B8, Va.B8
addp Vn.H8, Vm.H8, Va.H8
addp Vn.H4, Vm.H4, Va.H4
addp Vn.S4, Vm.S4, Va.S4
addp Vn.S2, Vm.S2, Va.S2
addp Vn.D2, Vm.D2, Va.D2
adds Wn, Wm, Wa {, LSL|LSR|ASR #uimm } (#uimm < 32)
adds Xn, Xm, Xa {, LSL|LSR|ASR #uimm } (#uimm < 64)
adds Wn, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm } (0 <= #uimm <= 4)
adds Xn, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } } (0 <= #uimm <= 4)
adds Xn, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm } (0 <= #uimm <= 4)
adds Wn, Wm|WSP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
adds Xn, Xm|SP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
addv Bn, Vm.B16
addv Bn, Vm.B8
addv Hn, Vm.H8
addv Hn, Vm.H4
addv Sn, Vm.S4
adr Xn, <offset> (offset is 21 bit)
adrp Xn, <offset> (offset is 21 bit, 4K-page aligned)
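
adr reaches +/-1 MiB from the PC; adrp yields the 4 KiB page of the target, giving +/-4 GiB reach when paired with an add of the low 12 bits. A typical idiom, assuming GNU-style relocation operators and an illustrative symbol name:

    adrp x0, some_symbol            // page address of some_symbol
    add  x0, x0, :lo12:some_symbol  // plus its offset within the page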
and Vn.B16, Vm.B16, Va.B16
and Vn.B8, Vm.B8, Va.B8
and Wn|WSP, Wm, #imm (#imm is a logical immediate)
and Xn|SP, Xm, #imm (#imm is a logical immediate)
and Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
and Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
ands Wn, Wm, #imm (#imm is a logical immediate)
ands Xn, Xm, #imm (#imm is a logical immediate)
ands Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
ands Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
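
A logical immediate is any bit pattern expressible as a rotated run of consecutive ones repeated across 2-, 4-, 8-, 16-, 32- or 64-bit elements; all-zeros and all-ones are not encodable, nor are most arbitrary constants. For example:

    and x0, x1, #0xFF00FF00FF00FF00  // encodable: 8 ones, rotated, repeating 16-bit element
    // and x0, x1, #0x12345678       // rejected: not a logical immediate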
asr Wn, Wm, Wa
asr Xn, Xm, Xa
asr Wn, Wm, #uimm (#uimm < 32)
asr Xn, Xm, #uimm (#uimm < 64)
asrv Wn, Wm, Wa
asrv Xn, Xm, Xa
b.<cond> <offset> (offset is 19 bit, 4-byte aligned)
b <offset> (offset is 26 bit, 4-byte aligned)
bcax Vn.B16, Vm.B16, Va.B16, Vb.B16
bfc Wn, #uimm, #uimm1 (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
bfc Xn, #uimm, #uimm1 (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)
bfi Wn, Wm, #uimm, #uimm1 (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
bfi Xn, Xm, #uimm, #uimm1 (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)
bfm Wn, Wm, #uimm, #uimm1 (#uimm < 32, #uimm1 < 32)
bfm Xn, Xm, #uimm, #uimm1 (#uimm < 64, #uimm1 < 64)
bfxil Wn, Wm, #uimm, #uimm1 (#uimm < 32, 1 <= #uimm1 <= 32 - uimm)
bfxil Xn, Xm, #uimm, #uimm1 (#uimm < 64, 1 <= #uimm1 <= 64 - uimm)
bic Vn.H8, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
bic Vn.H4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
bic Vn.S4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
bic Vn.S2, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
bic Vn.B16, Vm.B16, Va.B16
bic Vn.B8, Vm.B8, Va.B8
bic Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
bic Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
bics Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
bics Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
bif Vn.B16, Vm.B16, Va.B16
bif Vn.B8, Vm.B8, Va.B8
bit Vn.B16, Vm.B16, Va.B16
bit Vn.B8, Vm.B8, Va.B8
bl <offset> (offset is 26 bit, 4-byte aligned)
brk #uimm (#uimm < 65536)
bsl Vn.B16, Vm.B16, Va.B16
bsl Vn.B8, Vm.B8, Va.B8
cas Wn, Wm, [Xa|SP]
cas Xn, Xm, [Xa|SP]
casa Wn, Wm, [Xa|SP]
casa Xn, Xm, [Xa|SP]
casal Wn, Wm, [Xa|SP]
casal Xn, Xm, [Xa|SP]
casl Wn, Wm, [Xa|SP]
casl Xn, Xm, [Xa|SP]
casp Wn, Wn+1, Wm, Wm+1, [Xa|SP] (n is even, m is even)
casp Xn, Xn+1, Xm, Xm+1, [Xa|SP] (n is even, m is even)
caspa Wn, Wn+1, Wm, Wm+1, [Xa|SP] (n is even, m is even)
caspa Xn, Xn+1, Xm, Xm+1, [Xa|SP] (n is even, m is even)
caspal Wn, Wn+1, Wm, Wm+1, [Xa|SP] (n is even, m is even)
caspal Xn, Xn+1, Xm, Xm+1, [Xa|SP] (n is even, m is even)
caspl Wn, Wn+1, Wm, Wm+1, [Xa|SP] (n is even, m is even)
caspl Xn, Xn+1, Xm, Xm+1, [Xa|SP] (n is even, m is even)
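
The cas family performs an atomic compare-and-swap (FEAT_LSE): the first register holds the expected value and is overwritten with the value actually observed; the second holds the desired value. A minimal lock-acquire sketch (label name illustrative):

    mov  w1, #0          // expected: 0 = unlocked
    mov  w2, #1          // desired: 1 = locked
    casa w1, w2, [x0]    // acquire semantics; w1 := old value at [x0]
    cbnz w1, contended   // non-zero: someone else held the lock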
cbnz Wn, <offset> (offset is 19 bit, 4-byte aligned)
cbnz Xn, <offset> (offset is 19 bit, 4-byte aligned)
cbz Wn, <offset> (offset is 19 bit, 4-byte aligned)
cbz Xn, <offset> (offset is 19 bit, 4-byte aligned)
ccmn Wn, #uimm, #uimm1, <cond> (#uimm < 32, #uimm1 < 16)
ccmn Xn, #uimm, #uimm1, <cond> (#uimm < 32, #uimm1 < 16)
ccmn Wn, Wm, #uimm, <cond> (#uimm < 16)
ccmn Xn, Xm, #uimm, <cond> (#uimm < 16)
ccmp Wn, #uimm, #uimm1, <cond> (#uimm < 32, #uimm1 < 16)
ccmp Xn, #uimm, #uimm1, <cond> (#uimm < 32, #uimm1 < 16)
ccmp Wn, Wm, #uimm, <cond> (#uimm < 16)
ccmp Xn, Xm, #uimm, <cond> (#uimm < 16)
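
ccmp/ccmn set NZCV from the comparison when <cond> holds, and from the 4-bit immediate otherwise, which chains comparisons without branches. For instance, testing w0 == 1 && w1 == 2 (label name illustrative):

    cmp  w0, #1
    ccmp w1, #2, #0, eq   // if eq: compare w1, #2; else NZCV := 0b0000 (clears Z)
    b.eq both_equal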
cinc Wn, Wm, <cond>
cinc Xn, Xm, <cond>
cinv Wn, Wm, <cond>
cinv Xn, Xm, <cond>
clrex #uimm (#uimm < 16)
clrex
cls Vn.B16, Vm.B16
cls Vn.B8, Vm.B8
cls Vn.H8, Vm.H8
cls Vn.H4, Vm.H4
cls Vn.S4, Vm.S4
cls Vn.S2, Vm.S2
cls Wn, Wm
cls Xn, Xm
clz Vn.B16, Vm.B16
clz Vn.B8, Vm.B8
clz Vn.H8, Vm.H8
clz Vn.H4, Vm.H4
clz Vn.S4, Vm.S4
clz Vn.S2, Vm.S2
clz Wn, Wm
clz Xn, Xm
cmeq Dn, Dm, Da
cmeq Vn.B16, Vm.B16, Va.B16
cmeq Vn.B8, Vm.B8, Va.B8
cmeq Vn.H8, Vm.H8, Va.H8
cmeq Vn.H4, Vm.H4, Va.H4
cmeq Vn.S4, Vm.S4, Va.S4
cmeq Vn.S2, Vm.S2, Va.S2
cmeq Vn.D2, Vm.D2, Va.D2
cmeq Dn, Dm, 0
cmeq Vn.B16, Vm.B16, 0
cmeq Vn.B8, Vm.B8, 0
cmeq Vn.H8, Vm.H8, 0
cmeq Vn.H4, Vm.H4, 0
cmeq Vn.S4, Vm.S4, 0
cmeq Vn.S2, Vm.S2, 0
cmeq Vn.D2, Vm.D2, 0
cmge Dn, Dm, Da
cmge Vn.B16, Vm.B16, Va.B16
cmge Vn.B8, Vm.B8, Va.B8
cmge Vn.H8, Vm.H8, Va.H8
cmge Vn.H4, Vm.H4, Va.H4
cmge Vn.S4, Vm.S4, Va.S4
cmge Vn.S2, Vm.S2, Va.S2
cmge Vn.D2, Vm.D2, Va.D2
cmge Dn, Dm, 0
cmge Vn.B16, Vm.B16, 0
cmge Vn.B8, Vm.B8, 0
cmge Vn.H8, Vm.H8, 0
cmge Vn.H4, Vm.H4, 0
cmge Vn.S4, Vm.S4, 0
cmge Vn.S2, Vm.S2, 0
cmge Vn.D2, Vm.D2, 0
cmgt Dn, Dm, Da
cmgt Vn.B16, Vm.B16, Va.B16
cmgt Vn.B8, Vm.B8, Va.B8
cmgt Vn.H8, Vm.H8, Va.H8
cmgt Vn.H4, Vm.H4, Va.H4
cmgt Vn.S4, Vm.S4, Va.S4
cmgt Vn.S2, Vm.S2, Va.S2
cmgt Vn.D2, Vm.D2, Va.D2
cmgt Dn, Dm, 0
cmgt Vn.B16, Vm.B16, 0
cmgt Vn.B8, Vm.B8, 0
cmgt Vn.H8, Vm.H8, 0
cmgt Vn.H4, Vm.H4, 0
cmgt Vn.S4, Vm.S4, 0
cmgt Vn.S2, Vm.S2, 0
cmgt Vn.D2, Vm.D2, 0
cmhi Dn, Dm, Da
cmhi Vn.B16, Vm.B16, Va.B16
cmhi Vn.B8, Vm.B8, Va.B8
cmhi Vn.H8, Vm.H8, Va.H8
cmhi Vn.H4, Vm.H4, Va.H4
cmhi Vn.S4, Vm.S4, Va.S4
cmhi Vn.S2, Vm.S2, Va.S2
cmhi Vn.D2, Vm.D2, Va.D2
cmhs Dn, Dm, Da
cmhs Vn.B16, Vm.B16, Va.B16
cmhs Vn.B8, Vm.B8, Va.B8
cmhs Vn.H8, Vm.H8, Va.H8
cmhs Vn.H4, Vm.H4, Va.H4
cmhs Vn.S4, Vm.S4, Va.S4
cmhs Vn.S2, Vm.S2, Va.S2
cmhs Vn.D2, Vm.D2, Va.D2
cmle Dn, Dm, 0
cmle Vn.B16, Vm.B16, 0
cmle Vn.B8, Vm.B8, 0
cmle Vn.H8, Vm.H8, 0
cmle Vn.H4, Vm.H4, 0
cmle Vn.S4, Vm.S4, 0
cmle Vn.S2, Vm.S2, 0
cmle Vn.D2, Vm.D2, 0
cmlt Dn, Dm, 0
cmlt Vn.B16, Vm.B16, 0
cmlt Vn.B8, Vm.B8, 0
cmlt Vn.H8, Vm.H8, 0
cmlt Vn.H4, Vm.H4, 0
cmlt Vn.S4, Vm.S4, 0
cmlt Vn.S2, Vm.S2, 0
cmlt Vn.D2, Vm.D2, 0
cmn Wn, Wm {, LSL|LSR|ASR #uimm } (#uimm < 32)
cmn Xn, Xm {, LSL|LSR|ASR #uimm } (#uimm < 64)
cmn Wn|WSP, Wm {, LSL|UXT[BHWX]|SXT[BHWX] #uimm } (0 <= #uimm <= 4)
cmn Xn|SP, Wm, UXT[BHW]|SXT[BHW] { #uimm } (0 <= #uimm <= 4)
cmn Xn|SP, Xm {, LSL|UXTX|SXTX #uimm } (0 <= #uimm <= 4)
cmn Wn|WSP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
cmn Xn|SP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
cmp Wn, Wm {, LSL|LSR|ASR #uimm } (#uimm < 32)
cmp Xn, Xm {, LSL|LSR|ASR #uimm } (#uimm < 64)
cmp Wn|WSP, Wm {, LSL|UXT[BHWX]|SXT[BHWX] #uimm } (0 <= #uimm <= 4)
cmp Xn|SP, Wm, UXT[BHW]|SXT[BHW] { #uimm } (0 <= #uimm <= 4)
cmp Xn|SP, Xm {, LSL|UXTX|SXTX #uimm } (0 <= #uimm <= 4)
cmp Wn|WSP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
cmp Xn|SP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
cmtst Dn, Dm, Da
cmtst Vn.B16, Vm.B16, Va.B16
cmtst Vn.B8, Vm.B8, Va.B8
cmtst Vn.H8, Vm.H8, Va.H8
cmtst Vn.H4, Vm.H4, Va.H4
cmtst Vn.S4, Vm.S4, Va.S4
cmtst Vn.S2, Vm.S2, Va.S2
cmtst Vn.D2, Vm.D2, Va.D2
cneg Wn, Wm, <cond>
cneg Xn, Xm, <cond>
cnt Vn.B16, Vm.B16
cnt Vn.B8, Vm.B8
csel Wn, Wm, Wa, <cond>
csel Xn, Xm, Xa, <cond>
cset Wn, <cond>
cset Xn, <cond>
csetm Wn, <cond>
csetm Xn, <cond>
csinc Wn, Wm, Wa, <cond>
csinc Xn, Xm, Xa, <cond>
csinv Wn, Wm, Wa, <cond>
csinv Xn, Xm, Xa, <cond>
csneg Wn, Wm, Wa, <cond>
csneg Xn, Xm, Xa, <cond>
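
The conditional-select family covers most branchless selection patterns; for example (arbitrary registers):

    cmp  w0, w1
    csel w2, w0, w1, gt   // w2 = max(w0, w1), signed
    cset w3, gt           // w3 = (w0 > w1) ? 1 : 0

    cmp  w0, #0
    cneg w1, w0, lt       // w1 = |w0|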
dcps1 { #uimm } (#uimm < 65536)
dcps2 { #uimm } (#uimm < 65536)
dcps3 { #uimm } (#uimm < 65536)
dmb barrier_op
dmb #uimm (#uimm < 16)
dsb barrier_op
dsb #uimm (#uimm < 16)
dup Bn, Vm.B[uimm] (#uimm < 16)
dup Hn, Vm.H[uimm] (#uimm < 8)
dup Sn, Vm.S[uimm] (#uimm < 4)
dup Dn, Vm.D[uimm] (#uimm < 2)
dup Vn.B16, Vm.B[uimm] (#uimm < 16)
dup Vn.B8, Vm.B[uimm] (#uimm < 16)
dup Vn.H8, Vm.H[uimm] (#uimm < 8)
dup Vn.H4, Vm.H[uimm] (#uimm < 8)
dup Vn.S4, Vm.S[uimm] (#uimm < 4)
dup Vn.S2, Vm.S[uimm] (#uimm < 4)
dup Vn.D2, Vm.D[uimm] (#uimm < 2)
dup Vn.B16, Wm
dup Vn.B8, Wm
dup Vn.H8, Wm
dup Vn.H4, Wm
dup Vn.S4, Wm
dup Vn.S2, Wm
dup Vn.D2, Xm
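
dup with a general register broadcasts it to every lane; with a vector element it splats that lane:

    dup v0.4s, w1        // all four 32-bit lanes := w1
    dup v1.16b, v2.b[3]  // all sixteen byte lanes := byte 3 of v2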
eon Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
eon Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
eor Vn.B16, Vm.B16, Va.B16
eor Vn.B8, Vm.B8, Va.B8
eor Wn|WSP, Wm, #imm (#imm is a logical immediate)
eor Xn|SP, Xm, #imm (#imm is a logical immediate)
eor Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
eor Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
eor3 Vn.B16, Vm.B16, Va.B16, Vb.B16
ext Vn.B8, Vm.B8, Va.B8, #uimm (#uimm < 8)
ext Vn.B16, Vm.B16, Va.B16, #uimm (#uimm < 16)
extr Wn, Wm, Wa, #uimm (#uimm < 32)
extr Xn, Xm, Xa, #uimm (#uimm < 64)
fabd Hn, Hm, Ha
fabd Sn, Sm, Sa
fabd Dn, Dm, Da
fabd Vn.H8, Vm.H8, Va.H8
fabd Vn.H4, Vm.H4, Va.H4
fabd Vn.S4, Vm.S4, Va.S4
fabd Vn.S2, Vm.S2, Va.S2
fabd Vn.D2, Vm.D2, Va.D2
fabs Vn.H8, Vm.H8
fabs Vn.H4, Vm.H4
fabs Vn.S4, Vm.S4
fabs Vn.S2, Vm.S2
fabs Vn.D2, Vm.D2
fabs Hn, Hm
fabs Sn, Sm
fabs Dn, Dm
facge Hn, Hm, Ha
facge Sn, Sm, Sa
facge Dn, Dm, Da
facge Vn.H8, Vm.H8, Va.H8
facge Vn.H4, Vm.H4, Va.H4
facge Vn.S4, Vm.S4, Va.S4
facge Vn.S2, Vm.S2, Va.S2
facge Vn.D2, Vm.D2, Va.D2
facgt Hn, Hm, Ha
facgt Sn, Sm, Sa
facgt Dn, Dm, Da
facgt Vn.H8, Vm.H8, Va.H8
facgt Vn.H4, Vm.H4, Va.H4
facgt Vn.S4, Vm.S4, Va.S4
facgt Vn.S2, Vm.S2, Va.S2
facgt Vn.D2, Vm.D2, Va.D2
fadd Vn.H8, Vm.H8, Va.H8
fadd Vn.H4, Vm.H4, Va.H4
fadd Vn.S4, Vm.S4, Va.S4
fadd Vn.S2, Vm.S2, Va.S2
fadd Vn.D2, Vm.D2, Va.D2
fadd Hn, Hm, Ha
fadd Sn, Sm, Sa
fadd Dn, Dm, Da
faddp Hn, Vm.H2
faddp Sn, Vm.S2
faddp Dn, Vm.D2
faddp Vn.H8, Vm.H8, Va.H8
faddp Vn.H4, Vm.H4, Va.H4
faddp Vn.S4, Vm.S4, Va.S4
faddp Vn.S2, Vm.S2, Va.S2
faddp Vn.D2, Vm.D2, Va.D2
fcadd Vn.H8, Vm.H8, Va.H8, #uimm (#uimm = [90, 270])
fcadd Vn.H4, Vm.H4, Va.H4, #uimm (#uimm = [90, 270])
fcadd Vn.S4, Vm.S4, Va.S4, #uimm (#uimm = [90, 270])
fcadd Vn.S2, Vm.S2, Va.S2, #uimm (#uimm = [90, 270])
fcadd Vn.D2, Vm.D2, Va.D2, #uimm (#uimm = [90, 270])
fccmp Hn, Hm, #uimm, <cond> (#uimm < 16)
fccmp Sn, Sm, #uimm, <cond> (#uimm < 16)
fccmp Dn, Dm, #uimm, <cond> (#uimm < 16)
fccmpe Hn, Hm, #uimm, <cond> (#uimm < 16)
fccmpe Sn, Sm, #uimm, <cond> (#uimm < 16)
fccmpe Dn, Dm, #uimm, <cond> (#uimm < 16)
fcmeq Hn, Hm, Ha
fcmeq Sn, Sm, Sa
fcmeq Dn, Dm, Da
fcmeq Vn.H8, Vm.H8, Va.H8
fcmeq Vn.H4, Vm.H4, Va.H4
fcmeq Vn.S4, Vm.S4, Va.S4
fcmeq Vn.S2, Vm.S2, Va.S2
fcmeq Vn.D2, Vm.D2, Va.D2
fcmeq Hn, Hm, 0
fcmeq Sn, Sm, 0
fcmeq Dn, Dm, 0
fcmeq Vn.H8, Vm.H8, 0
fcmeq Vn.H4, Vm.H4, 0
fcmeq Vn.S4, Vm.S4, 0
fcmeq Vn.S2, Vm.S2, 0
fcmeq Vn.D2, Vm.D2, 0
fcmge Hn, Hm, Ha
fcmge Sn, Sm, Sa
fcmge Dn, Dm, Da
fcmge Vn.H8, Vm.H8, Va.H8
fcmge Vn.H4, Vm.H4, Va.H4
fcmge Vn.S4, Vm.S4, Va.S4
fcmge Vn.S2, Vm.S2, Va.S2
fcmge Vn.D2, Vm.D2, Va.D2
fcmge Hn, Hm, 0
fcmge Sn, Sm, 0
fcmge Dn, Dm, 0
fcmge Vn.H8, Vm.H8, 0
fcmge Vn.H4, Vm.H4, 0
fcmge Vn.S4, Vm.S4, 0
fcmge Vn.S2, Vm.S2, 0
fcmge Vn.D2, Vm.D2, 0
fcmgt Hn, Hm, Ha
fcmgt Sn, Sm, Sa
fcmgt Dn, Dm, Da
fcmgt Vn.H8, Vm.H8, Va.H8
fcmgt Vn.H4, Vm.H4, Va.H4
fcmgt Vn.S4, Vm.S4, Va.S4
fcmgt Vn.S2, Vm.S2, Va.S2
fcmgt Vn.D2, Vm.D2, Va.D2
fcmgt Hn, Hm, 0
fcmgt Sn, Sm, 0
fcmgt Dn, Dm, 0
fcmgt Vn.H8, Vm.H8, 0
fcmgt Vn.H4, Vm.H4, 0
fcmgt Vn.S4, Vm.S4, 0
fcmgt Vn.S2, Vm.S2, 0
fcmgt Vn.D2, Vm.D2, 0
fcmla Vn.H4, Vm.H4, Va.H[uimm], #uimm1 (#uimm < 2, #uimm1 = [0, 90, 180, 270])
fcmla Vn.H8, Vm.H8, Va.H[uimm], #uimm1 (#uimm < 4, #uimm1 = [0, 90, 180, 270])
fcmla Vn.S4, Vm.S4, Va.S[uimm], #uimm1 (#uimm < 2, #uimm1 = [0, 90, 180, 270])
fcmla Vn.H8, Vm.H8, Va.H8, #uimm (#uimm = [0, 90, 180, 270])
fcmla Vn.H4, Vm.H4, Va.H4, #uimm (#uimm = [0, 90, 180, 270])
fcmla Vn.S4, Vm.S4, Va.S4, #uimm (#uimm = [0, 90, 180, 270])
fcmla Vn.S2, Vm.S2, Va.S2, #uimm (#uimm = [0, 90, 180, 270])
fcmla Vn.D2, Vm.D2, Va.D2, #uimm (#uimm = [0, 90, 180, 270])
fcmle Hn, Hm, 0
fcmle Sn, Sm, 0
fcmle Dn, Dm, 0
fcmle Vn.H8, Vm.H8, 0
fcmle Vn.H4, Vm.H4, 0
fcmle Vn.S4, Vm.S4, 0
fcmle Vn.S2, Vm.S2, 0
fcmle Vn.D2, Vm.D2, 0
fcmlt Hn, Hm, 0
fcmlt Sn, Sm, 0
fcmlt Dn, Dm, 0
fcmlt Vn.H8, Vm.H8, 0
fcmlt Vn.H4, Vm.H4, 0
fcmlt Vn.S4, Vm.S4, 0
fcmlt Vn.S2, Vm.S2, 0
fcmlt Vn.D2, Vm.D2, 0
fcmp Hn, Hm
fcmp Hn, 0
fcmp Sn, Sm
fcmp Sn, 0
fcmp Dn, Dm
fcmp Dn, 0
fcmpe Hn, Hm
fcmpe Hn, 0
fcmpe Sn, Sm
fcmpe Sn, 0
fcmpe Dn, Dm
fcmpe Dn, 0
fcsel Hn, Hm, Ha, <cond>
fcsel Sn, Sm, Sa, <cond>
fcsel Dn, Dm, Da, <cond>
fcvt Sn, Hm
fcvt Dn, Hm
fcvt Hn, Sm
fcvt Dn, Sm
fcvt Hn, Dm
fcvt Sn, Dm
fcvtas Hn, Hm
fcvtas Sn, Sm
fcvtas Dn, Dm
fcvtas Vn.H8, Vm.H8
fcvtas Vn.H4, Vm.H4
fcvtas Vn.S4, Vm.S4
fcvtas Vn.S2, Vm.S2
fcvtas Vn.D2, Vm.D2
fcvtas Wn, Hm
fcvtas Xn, Hm
fcvtas Wn, Sm
fcvtas Xn, Sm
fcvtas Wn, Dm
fcvtas Xn, Dm
fcvtau Hn, Hm
fcvtau Sn, Sm
fcvtau Dn, Dm
fcvtau Vn.H8, Vm.H8
fcvtau Vn.H4, Vm.H4
fcvtau Vn.S4, Vm.S4
fcvtau Vn.S2, Vm.S2
fcvtau Vn.D2, Vm.D2
fcvtau Wn, Hm
fcvtau Xn, Hm
fcvtau Wn, Sm
fcvtau Xn, Sm
fcvtau Wn, Dm
fcvtau Xn, Dm
fcvtl Vn.S4, Vm.H4
fcvtl Vn.D2, Vm.S2
fcvtl2 Vn.S4, Vm.H8
fcvtl2 Vn.D2, Vm.S4
fcvtms Hn, Hm
fcvtms Sn, Sm
fcvtms Dn, Dm
fcvtms Vn.H8, Vm.H8
fcvtms Vn.H4, Vm.H4
fcvtms Vn.S4, Vm.S4
fcvtms Vn.S2, Vm.S2
fcvtms Vn.D2, Vm.D2
fcvtms Wn, Hm
fcvtms Xn, Hm
fcvtms Wn, Sm
fcvtms Xn, Sm
fcvtms Wn, Dm
fcvtms Xn, Dm
fcvtmu Hn, Hm
fcvtmu Sn, Sm
fcvtmu Dn, Dm
fcvtmu Vn.H8, Vm.H8
fcvtmu Vn.H4, Vm.H4
fcvtmu Vn.S4, Vm.S4
fcvtmu Vn.S2, Vm.S2
fcvtmu Vn.D2, Vm.D2
fcvtmu Wn, Hm
fcvtmu Xn, Hm
fcvtmu Wn, Sm
fcvtmu Xn, Sm
fcvtmu Wn, Dm
fcvtmu Xn, Dm
fcvtns Hn, Hm
fcvtns Sn, Sm
fcvtns Dn, Dm
fcvtns Vn.H8, Vm.H8
fcvtns Vn.H4, Vm.H4
fcvtns Vn.S4, Vm.S4
fcvtns Vn.S2, Vm.S2
fcvtns Vn.D2, Vm.D2
fcvtns Wn, Hm
fcvtns Xn, Hm
fcvtns Wn, Sm
fcvtns Xn, Sm
fcvtns Wn, Dm
fcvtns Xn, Dm
fcvtnu Hn, Hm
fcvtnu Sn, Sm
fcvtnu Dn, Dm
fcvtnu Vn.H8, Vm.H8
fcvtnu Vn.H4, Vm.H4
fcvtnu Vn.S4, Vm.S4
fcvtnu Vn.S2, Vm.S2
fcvtnu Vn.D2, Vm.D2
fcvtnu Wn, Hm
fcvtnu Xn, Hm
fcvtnu Wn, Sm
fcvtnu Xn, Sm
fcvtnu Wn, Dm
fcvtnu Xn, Dm
fcvtps Hn, Hm
fcvtps Sn, Sm
fcvtps Dn, Dm
fcvtps Vn.H8, Vm.H8
fcvtps Vn.H4, Vm.H4
fcvtps Vn.S4, Vm.S4
fcvtps Vn.S2, Vm.S2
fcvtps Vn.D2, Vm.D2
fcvtps Wn, Hm
fcvtps Xn, Hm
fcvtps Wn, Sm
fcvtps Xn, Sm
fcvtps Wn, Dm
fcvtps Xn, Dm
fcvtpu Hn, Hm
fcvtpu Sn, Sm
fcvtpu Dn, Dm
fcvtpu Vn.H8, Vm.H8
fcvtpu Vn.H4, Vm.H4
fcvtpu Vn.S4, Vm.S4
fcvtpu Vn.S2, Vm.S2
fcvtpu Vn.D2, Vm.D2
fcvtpu Wn, Hm
fcvtpu Xn, Hm
fcvtpu Wn, Sm
fcvtpu Xn, Sm
fcvtpu Wn, Dm
fcvtpu Xn, Dm
fcvtxn Sn, Dm
fcvtxn Vn.S2, Vm.D2
fcvtzs Hn, Hm, #uimm (1 <= #uimm <= 16)
fcvtzs Sn, Sm, #uimm (1 <= #uimm <= 32)
fcvtzs Dn, Dm, #uimm (1 <= #uimm <= 64)
fcvtzs Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
fcvtzs Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
fcvtzs Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
fcvtzs Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
fcvtzs Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
fcvtzs Hn, Hm
fcvtzs Sn, Sm
fcvtzs Dn, Dm
fcvtzs Vn.H8, Vm.H8
fcvtzs Vn.H4, Vm.H4
fcvtzs Vn.S4, Vm.S4
fcvtzs Vn.S2, Vm.S2
fcvtzs Vn.D2, Vm.D2
fcvtzs Wn, Hm, #uimm (1 <= #uimm <= 32)
fcvtzs Xn, Hm, #uimm (1 <= #uimm <= 64)
fcvtzs Wn, Sm, #uimm (1 <= #uimm <= 32)
fcvtzs Xn, Sm, #uimm (1 <= #uimm <= 64)
fcvtzs Wn, Dm, #uimm (1 <= #uimm <= 32)
fcvtzs Xn, Dm, #uimm (1 <= #uimm <= 64)
fcvtzs Wn, Hm
fcvtzs Xn, Hm
fcvtzs Wn, Sm
fcvtzs Xn, Sm
fcvtzs Wn, Dm
fcvtzs Xn, Dm
fcvtzu Hn, Hm, #uimm (1 <= #uimm <= 16)
fcvtzu Sn, Sm, #uimm (1 <= #uimm <= 32)
fcvtzu Dn, Dm, #uimm (1 <= #uimm <= 64)
fcvtzu Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
fcvtzu Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
fcvtzu Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
fcvtzu Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
fcvtzu Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
fcvtzu Hn, Hm
fcvtzu Sn, Sm
fcvtzu Dn, Dm
fcvtzu Vn.H8, Vm.H8
fcvtzu Vn.H4, Vm.H4
fcvtzu Vn.S4, Vm.S4
fcvtzu Vn.S2, Vm.S2
fcvtzu Vn.D2, Vm.D2
fcvtzu Wn, Hm, #uimm (1 <= #uimm <= 32)
fcvtzu Xn, Hm, #uimm (1 <= #uimm <= 64)
fcvtzu Wn, Sm, #uimm (1 <= #uimm <= 32)
fcvtzu Xn, Sm, #uimm (1 <= #uimm <= 64)
fcvtzu Wn, Dm, #uimm (1 <= #uimm <= 32)
fcvtzu Xn, Dm, #uimm (1 <= #uimm <= 64)
fcvtzu Wn, Hm
fcvtzu Xn, Hm
fcvtzu Wn, Sm
fcvtzu Xn, Sm
fcvtzu Wn, Dm
fcvtzu Xn, Dm
fdiv Vn.H8, Vm.H8, Va.H8
fdiv Vn.H4, Vm.H4, Va.H4
fdiv Vn.S4, Vm.S4, Va.S4
fdiv Vn.S2, Vm.S2, Va.S2
fdiv Vn.D2, Vm.D2, Va.D2
fdiv Hn, Hm, Ha
fdiv Sn, Sm, Sa
fdiv Dn, Dm, Da
fmadd Hn, Hm, Ha, Hb
fmadd Sn, Sm, Sa, Sb
fmadd Dn, Dm, Da, Db
fmax Vn.H8, Vm.H8, Va.H8
fmax Vn.H4, Vm.H4, Va.H4
fmax Vn.S4, Vm.S4, Va.S4
fmax Vn.S2, Vm.S2, Va.S2
fmax Vn.D2, Vm.D2, Va.D2
fmax Hn, Hm, Ha
fmax Sn, Sm, Sa
fmax Dn, Dm, Da
fmaxnm Vn.H8, Vm.H8, Va.H8
fmaxnm Vn.H4, Vm.H4, Va.H4
fmaxnm Vn.S4, Vm.S4, Va.S4
fmaxnm Vn.S2, Vm.S2, Va.S2
fmaxnm Vn.D2, Vm.D2, Va.D2
fmaxnm Hn, Hm, Ha
fmaxnm Sn, Sm, Sa
fmaxnm Dn, Dm, Da
fmaxnmp Hn, Vm.H2
fmaxnmp Sn, Vm.S2
fmaxnmp Dn, Vm.D2
fmaxnmp Vn.H8, Vm.H8, Va.H8
fmaxnmp Vn.H4, Vm.H4, Va.H4
fmaxnmp Vn.S4, Vm.S4, Va.S4
fmaxnmp Vn.S2, Vm.S2, Va.S2
fmaxnmp Vn.D2, Vm.D2, Va.D2
fmaxnmv Hn, Vm.H8
fmaxnmv Hn, Vm.H4
fmaxnmv Sn, Vm.S4
fmaxp Hn, Vm.H2
fmaxp Sn, Vm.S2
fmaxp Dn, Vm.D2
fmaxp Vn.H8, Vm.H8, Va.H8
fmaxp Vn.H4, Vm.H4, Va.H4
fmaxp Vn.S4, Vm.S4, Va.S4
fmaxp Vn.S2, Vm.S2, Va.S2
fmaxp Vn.D2, Vm.D2, Va.D2
fmaxv Hn, Vm.H8
fmaxv Hn, Vm.H4
fmaxv Sn, Vm.S4
fmin Vn.H8, Vm.H8, Va.H8
fmin Vn.H4, Vm.H4, Va.H4
fmin Vn.S4, Vm.S4, Va.S4
fmin Vn.S2, Vm.S2, Va.S2
fmin Vn.D2, Vm.D2, Va.D2
fmin Hn, Hm, Ha
fmin Sn, Sm, Sa
fmin Dn, Dm, Da
fminnm Vn.H8, Vm.H8, Va.H8
fminnm Vn.H4, Vm.H4, Va.H4
fminnm Vn.S4, Vm.S4, Va.S4
fminnm Vn.S2, Vm.S2, Va.S2
fminnm Vn.D2, Vm.D2, Va.D2
fminnm Hn, Hm, Ha
fminnm Sn, Sm, Sa
fminnm Dn, Dm, Da
fminnmp Hn, Vm.H2
fminnmp Sn, Vm.S2
fminnmp Dn, Vm.D2
fminnmp Vn.H8, Vm.H8, Va.H8
fminnmp Vn.H4, Vm.H4, Va.H4
fminnmp Vn.S4, Vm.S4, Va.S4
fminnmp Vn.S2, Vm.S2, Va.S2
fminnmp Vn.D2, Vm.D2, Va.D2
fminnmv Hn, Vm.H8
fminnmv Hn, Vm.H4
fminnmv Sn, Vm.S4
fminp Hn, Vm.H2
fminp Sn, Vm.S2
fminp Dn, Vm.D2
fminp Vn.H8, Vm.H8, Va.H8
fminp Vn.H4, Vm.H4, Va.H4
fminp Vn.S4, Vm.S4, Va.S4
fminp Vn.S2, Vm.S2, Va.S2
fminp Vn.D2, Vm.D2, Va.D2
fminv Hn, Vm.H8
fminv Hn, Vm.H4
fminv Sn, Vm.S4
fmla Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
fmla Sn, Sm, Va.S[uimm] (#uimm < 4)
fmla Dn, Dm, Va.D[uimm] (#uimm < 2)
fmla Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
fmla Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmla Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
fmla Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
fmla Vn.D2, Vm.D2, Va.D[uimm] (#uimm < 2)
fmla Vn.H8, Vm.H8, Va.H8
fmla Vn.H4, Vm.H4, Va.H4
fmla Vn.S4, Vm.S4, Va.S4
fmla Vn.S2, Vm.S2, Va.S2
fmla Vn.D2, Vm.D2, Va.D2
fmlal Vn.S2, Vm.H2, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlal Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlal Vn.S2, Vm.H2, Va.H2
fmlal Vn.S4, Vm.H4, Va.H4
fmlal2 Vn.S2, Vm.H2, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlal2 Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlal2 Vn.S2, Vm.H2, Va.H2
fmlal2 Vn.S4, Vm.H4, Va.H4
fmls Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
fmls Sn, Sm, Va.S[uimm] (#uimm < 4)
fmls Dn, Dm, Va.D[uimm] (#uimm < 2)
fmls Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
fmls Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmls Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
fmls Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
fmls Vn.D2, Vm.D2, Va.D[uimm] (#uimm < 2)
fmls Vn.H8, Vm.H8, Va.H8
fmls Vn.H4, Vm.H4, Va.H4
fmls Vn.S4, Vm.S4, Va.S4
fmls Vn.S2, Vm.S2, Va.S2
fmls Vn.D2, Vm.D2, Va.D2
fmlsl Vn.S2, Vm.H2, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlsl Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlsl Vn.S2, Vm.H2, Va.H2
fmlsl Vn.S4, Vm.H4, Va.H4
fmlsl2 Vn.S2, Vm.H2, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlsl2 Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmlsl2 Vn.S2, Vm.H2, Va.H2
fmlsl2 Vn.S4, Vm.H4, Va.H4
fmov Vn.H8, #imm (#imm is a floating point immediate)
fmov Vn.H4, #imm (#imm is a floating point immediate)
fmov Vn.S4, #imm (#imm is a floating point immediate)
fmov Vn.S2, #imm (#imm is a floating point immediate)
fmov Vn.D2, #imm (#imm is a floating point immediate)
fmov Hn, Hm
fmov Sn, Sm
fmov Dn, Dm
fmov Wn, Hm
fmov Xn, Hm
fmov Hn, Wm
fmov Sn, Wm
fmov Wn, Sm
fmov Hn, Xm
fmov Dn, Xm
fmov Vn.D[1], Xm
fmov Xn, Dm
fmov Xn, Vm.D[1]
fmov Hn, #imm (#imm is a floating point immediate)
fmov Sn, #imm (#imm is a floating point immediate)
fmov Dn, #imm (#imm is a floating point immediate)
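
A floating point immediate is one of the 256 values encodable in 8 bits as +/-(16..31)/16 x 2^e with e in -3..4; anything else must be loaded from memory or moved from a general register. For example:

    fmov d0, #0.5      // encodable: 1.0 x 2^-1
    fmov s1, #-3.875   // encodable: -31/16 x 2^1
    // fmov d2, #0.3   // rejected: not representable in the 8-bit encoding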
fmsub Hn, Hm, Ha, Hb
fmsub Sn, Sm, Sa, Sb
fmsub Dn, Dm, Da, Db
fmul Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
fmul Sn, Sm, Va.S[uimm] (#uimm < 4)
fmul Dn, Dm, Va.D[uimm] (#uimm < 2)
fmul Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
fmul Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmul Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
fmul Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
fmul Vn.D2, Vm.D2, Va.D[uimm] (#uimm < 2)
fmul Vn.H8, Vm.H8, Va.H8
fmul Vn.H4, Vm.H4, Va.H4
fmul Vn.S4, Vm.S4, Va.S4
fmul Vn.S2, Vm.S2, Va.S2
fmul Vn.D2, Vm.D2, Va.D2
fmul Hn, Hm, Ha
fmul Sn, Sm, Sa
fmul Dn, Dm, Da
fmulx Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
fmulx Sn, Sm, Va.S[uimm] (#uimm < 4)
fmulx Dn, Dm, Va.D[uimm] (#uimm < 2)
fmulx Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
fmulx Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
fmulx Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
fmulx Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
fmulx Vn.D2, Vm.D2, Va.D[uimm] (#uimm < 2)
fmulx Hn, Hm, Ha
fmulx Sn, Sm, Sa
fmulx Dn, Dm, Da
fmulx Vn.H8, Vm.H8, Va.H8
fmulx Vn.H4, Vm.H4, Va.H4
fmulx Vn.S4, Vm.S4, Va.S4
fmulx Vn.S2, Vm.S2, Va.S2
fmulx Vn.D2, Vm.D2, Va.D2
fneg Vn.H8, Vm.H8
fneg Vn.H4, Vm.H4
fneg Vn.S4, Vm.S4
fneg Vn.S2, Vm.S2
fneg Vn.D2, Vm.D2
fneg Hn, Hm
fneg Sn, Sm
fneg Dn, Dm
fnmadd Hn, Hm, Ha, Hb
fnmadd Sn, Sm, Sa, Sb
fnmadd Dn, Dm, Da, Db
fnmsub Hn, Hm, Ha, Hb
fnmsub Sn, Sm, Sa, Sb
fnmsub Dn, Dm, Da, Db
fnmul Hn, Hm, Ha
fnmul Sn, Sm, Sa
fnmul Dn, Dm, Da
frecpe Hn, Hm
frecpe Sn, Sm
frecpe Dn, Dm
frecpe Vn.H8, Vm.H8
frecpe Vn.H4, Vm.H4
frecpe Vn.S4, Vm.S4
frecpe Vn.S2, Vm.S2
frecpe Vn.D2, Vm.D2
frecps Hn, Hm, Ha
frecps Sn, Sm, Sa
frecps Dn, Dm, Da
frecps Vn.H8, Vm.H8, Va.H8
frecps Vn.H4, Vm.H4, Va.H4
frecps Vn.S4, Vm.S4, Va.S4
frecps Vn.S2, Vm.S2, Va.S2
frecps Vn.D2, Vm.D2, Va.D2
frecpx Hn, Hm
frecpx Sn, Sm
frecpx Dn, Dm
frinta Vn.H8, Vm.H8
frinta Vn.H4, Vm.H4
frinta Vn.S4, Vm.S4
frinta Vn.S2, Vm.S2
frinta Vn.D2, Vm.D2
frinta Hn, Hm
frinta Sn, Sm
frinta Dn, Dm
frinti Vn.H8, Vm.H8
frinti Vn.H4, Vm.H4
frinti Vn.S4, Vm.S4
frinti Vn.S2, Vm.S2
frinti Vn.D2, Vm.D2
frinti Hn, Hm
frinti Sn, Sm
frinti Dn, Dm
frintm Vn.H8, Vm.H8
frintm Vn.H4, Vm.H4
frintm Vn.S4, Vm.S4
frintm Vn.S2, Vm.S2
frintm Vn.D2, Vm.D2
frintm Hn, Hm
frintm Sn, Sm
frintm Dn, Dm
frintn Vn.H8, Vm.H8
frintn Vn.H4, Vm.H4
frintn Vn.S4, Vm.S4
frintn Vn.S2, Vm.S2
frintn Vn.D2, Vm.D2
frintn Hn, Hm
frintn Sn, Sm
frintn Dn, Dm
frintp Vn.H8, Vm.H8
frintp Vn.H4, Vm.H4
frintp Vn.S4, Vm.S4
frintp Vn.S2, Vm.S2
frintp Vn.D2, Vm.D2
frintp Hn, Hm
frintp Sn, Sm
frintp Dn, Dm
frintx Vn.H8, Vm.H8
frintx Vn.H4, Vm.H4
frintx Vn.S4, Vm.S4
frintx Vn.S2, Vm.S2
frintx Vn.D2, Vm.D2
frintx Hn, Hm
frintx Sn, Sm
frintx Dn, Dm
frintz Vn.H8, Vm.H8
frintz Vn.H4, Vm.H4
frintz Vn.S4, Vm.S4
frintz Vn.S2, Vm.S2
frintz Vn.D2, Vm.D2
frintz Hn, Hm
frintz Sn, Sm
frintz Dn, Dm
frsqrte Hn, Hm
frsqrte Sn, Sm
frsqrte Dn, Dm
frsqrte Vn.H8, Vm.H8
frsqrte Vn.H4, Vm.H4
frsqrte Vn.S4, Vm.S4
frsqrte Vn.S2, Vm.S2
frsqrte Vn.D2, Vm.D2
frsqrts Hn, Hm, Ha
frsqrts Sn, Sm, Sa
frsqrts Dn, Dm, Da
frsqrts Vn.H8, Vm.H8, Va.H8
frsqrts Vn.H4, Vm.H4, Va.H4
frsqrts Vn.S4, Vm.S4, Va.S4
frsqrts Vn.S2, Vm.S2, Va.S2
frsqrts Vn.D2, Vm.D2, Va.D2
fsqrt Vn.H8, Vm.H8
fsqrt Vn.H4, Vm.H4
fsqrt Vn.S4, Vm.S4
fsqrt Vn.S2, Vm.S2
fsqrt Vn.D2, Vm.D2
fsqrt Hn, Hm
fsqrt Sn, Sm
fsqrt Dn, Dm
fsub Vn.H8, Vm.H8, Va.H8
fsub Vn.H4, Vm.H4, Va.H4
fsub Vn.S4, Vm.S4, Va.S4
fsub Vn.S2, Vm.S2, Va.S2
fsub Vn.D2, Vm.D2, Va.D2
fsub Hn, Hm, Ha
fsub Sn, Sm, Sa
fsub Dn, Dm, Da
hlt #uimm (#uimm < 65536)
hvc #uimm (#uimm < 65536)
ins Vn.B[uimm], Vm.B[uimm1] (#uimm < 16, #uimm1 < 16)
ins Vn.H[uimm], Vm.H[uimm1] (#uimm < 8, #uimm1 < 8)
ins Vn.S[uimm], Vm.S[uimm1] (#uimm < 4, #uimm1 < 4)
ins Vn.D[uimm], Vm.D[uimm1] (#uimm < 2, #uimm1 < 2)
ins Vn.B[uimm], Wm (#uimm < 16)
ins Vn.H[uimm], Wm (#uimm < 8)
ins Vn.S[uimm], Wm (#uimm < 4)
ins Vn.D[uimm], Xm (#uimm < 2)
isb sy
isb #uimm (#uimm < 16)
isb
ld1 {Vn.B16 * 1}, [Xm|SP]
ld1 {Vn.B8 * 1}, [Xm|SP]
ld1 {Vn.H8 * 1}, [Xm|SP]
ld1 {Vn.H4 * 1}, [Xm|SP]
ld1 {Vn.S4 * 1}, [Xm|SP]
ld1 {Vn.S2 * 1}, [Xm|SP]
ld1 {Vn.D2 * 1}, [Xm|SP]
ld1 {Vn.D1 * 1}, [Xm|SP]
ld1 {Vn.B16 * 2}, [Xm|SP]
ld1 {Vn.B8 * 2}, [Xm|SP]
ld1 {Vn.H8 * 2}, [Xm|SP]
ld1 {Vn.H4 * 2}, [Xm|SP]
ld1 {Vn.S4 * 2}, [Xm|SP]
ld1 {Vn.S2 * 2}, [Xm|SP]
ld1 {Vn.D2 * 2}, [Xm|SP]
ld1 {Vn.D1 * 2}, [Xm|SP]
ld1 {Vn.B16 * 3}, [Xm|SP]
ld1 {Vn.B8 * 3}, [Xm|SP]
ld1 {Vn.H8 * 3}, [Xm|SP]
ld1 {Vn.H4 * 3}, [Xm|SP]
ld1 {Vn.S4 * 3}, [Xm|SP]
ld1 {Vn.S2 * 3}, [Xm|SP]
ld1 {Vn.D2 * 3}, [Xm|SP]
ld1 {Vn.D1 * 3}, [Xm|SP]
ld1 {Vn.B16 * 4}, [Xm|SP]
ld1 {Vn.B8 * 4}, [Xm|SP]
ld1 {Vn.H8 * 4}, [Xm|SP]
ld1 {Vn.H4 * 4}, [Xm|SP]
ld1 {Vn.S4 * 4}, [Xm|SP]
ld1 {Vn.S2 * 4}, [Xm|SP]
ld1 {Vn.D2 * 4}, [Xm|SP]
ld1 {Vn.D1 * 4}, [Xm|SP]
ld1 {Vn.B8 * 1}, [Xm|SP], 8
ld1 {Vn.H4 * 1}, [Xm|SP], 8
ld1 {Vn.S2 * 1}, [Xm|SP], 8
ld1 {Vn.D1 * 1}, [Xm|SP], 8
ld1 {Vn.B16 * 1}, [Xm|SP], 16
ld1 {Vn.H8 * 1}, [Xm|SP], 16
ld1 {Vn.S4 * 1}, [Xm|SP], 16
ld1 {Vn.D2 * 1}, [Xm|SP], 16
ld1 {Vn.B16 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H8 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H4 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S4 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S2 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D2 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D1 * 1}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 2}, [Xm|SP], 16
ld1 {Vn.H4 * 2}, [Xm|SP], 16
ld1 {Vn.S2 * 2}, [Xm|SP], 16
ld1 {Vn.D1 * 2}, [Xm|SP], 16
ld1 {Vn.B16 * 2}, [Xm|SP], 32
ld1 {Vn.H8 * 2}, [Xm|SP], 32
ld1 {Vn.S4 * 2}, [Xm|SP], 32
ld1 {Vn.D2 * 2}, [Xm|SP], 32
ld1 {Vn.B16 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H8 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H4 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S4 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S2 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D2 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D1 * 2}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 3}, [Xm|SP], 24
ld1 {Vn.H4 * 3}, [Xm|SP], 24
ld1 {Vn.S2 * 3}, [Xm|SP], 24
ld1 {Vn.D1 * 3}, [Xm|SP], 24
ld1 {Vn.B16 * 3}, [Xm|SP], 48
ld1 {Vn.H8 * 3}, [Xm|SP], 48
ld1 {Vn.S4 * 3}, [Xm|SP], 48
ld1 {Vn.D2 * 3}, [Xm|SP], 48
ld1 {Vn.B16 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H8 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H4 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S4 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S2 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D2 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D1 * 3}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 4}, [Xm|SP], 32
ld1 {Vn.H4 * 4}, [Xm|SP], 32
ld1 {Vn.S2 * 4}, [Xm|SP], 32
ld1 {Vn.D1 * 4}, [Xm|SP], 32
ld1 {Vn.B16 * 4}, [Xm|SP], 64
ld1 {Vn.H8 * 4}, [Xm|SP], 64
ld1 {Vn.S4 * 4}, [Xm|SP], 64
ld1 {Vn.D2 * 4}, [Xm|SP], 64
ld1 {Vn.B16 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B8 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H8 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.H4 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S4 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.S2 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D2 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.D1 * 4}, [Xm|SP], Xa (a is 0-30)
ld1 {Vn.B * 1}[uimm], [Xm|SP] (#uimm < 16)
ld1 {Vn.H * 1}[uimm], [Xm|SP] (#uimm < 8)
ld1 {Vn.S * 1}[uimm], [Xm|SP] (#uimm < 4)
ld1 {Vn.D * 1}[uimm], [Xm|SP] (#uimm < 2)
ld1 {Vn.B * 1}[uimm], [Xm|SP], 1 (#uimm < 16)
ld1 {Vn.B * 1}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
ld1 {Vn.H * 1}[uimm], [Xm|SP], 2 (#uimm < 8)
ld1 {Vn.H * 1}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
ld1 {Vn.S * 1}[uimm], [Xm|SP], 4 (#uimm < 4)
ld1 {Vn.S * 1}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
ld1 {Vn.D * 1}[uimm], [Xm|SP], 8 (#uimm < 2)
ld1 {Vn.D * 1}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
ld1r {Vn.B16 * 1}, [Xm|SP]
ld1r {Vn.B8 * 1}, [Xm|SP]
ld1r {Vn.H8 * 1}, [Xm|SP]
ld1r {Vn.H4 * 1}, [Xm|SP]
ld1r {Vn.S4 * 1}, [Xm|SP]
ld1r {Vn.S2 * 1}, [Xm|SP]
ld1r {Vn.D2 * 1}, [Xm|SP]
ld1r {Vn.D1 * 1}, [Xm|SP]
ld1r {Vn.B16 * 1}, [Xm|SP], 1
ld1r {Vn.B8 * 1}, [Xm|SP], 1
ld1r {Vn.H8 * 1}, [Xm|SP], 2
ld1r {Vn.H4 * 1}, [Xm|SP], 2
ld1r {Vn.S4 * 1}, [Xm|SP], 4
ld1r {Vn.S2 * 1}, [Xm|SP], 4
ld1r {Vn.D2 * 1}, [Xm|SP], 8
ld1r {Vn.D1 * 1}, [Xm|SP], 8
ld1r {Vn.B16 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.B8 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.H8 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.H4 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.S4 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.S2 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.D2 * 1}, [Xm|SP], Xa (a is 0-30)
ld1r {Vn.D1 * 1}, [Xm|SP], Xa (a is 0-30)
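
In the list notation above, {Vn.B16 * 2} denotes two consecutive vector registers starting at Vn ({v0.16b, v1.16b} in standard syntax); a trailing immediate or Xa register is a post-index writeback added to the base, and ld1r replicates one loaded element to every lane. For example:

    ld1  {v0.16b, v1.16b}, [x0], #32  // load 32 bytes, then x0 += 32
    ld1  {v2.s}[1], [x1]              // load one 32-bit lane into lane 1 of v2
    ld1r {v3.8h}, [x2], x4            // replicate one halfword to all lanes, then x2 += x4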
ld2 {Vn.B16 * 2}, [Xm|SP]
ld2 {Vn.B8 * 2}, [Xm|SP]
ld2 {Vn.H8 * 2}, [Xm|SP]
ld2 {Vn.H4 * 2}, [Xm|SP]
ld2 {Vn.S4 * 2}, [Xm|SP]
ld2 {Vn.S2 * 2}, [Xm|SP]
ld2 {Vn.D2 * 2}, [Xm|SP]
ld2 {Vn.B8 * 2}, [Xm|SP], 16
ld2 {Vn.H4 * 2}, [Xm|SP], 16
ld2 {Vn.S2 * 2}, [Xm|SP], 16
ld2 {Vn.B16 * 2}, [Xm|SP], 32
ld2 {Vn.H8 * 2}, [Xm|SP], 32
ld2 {Vn.S4 * 2}, [Xm|SP], 32
ld2 {Vn.D2 * 2}, [Xm|SP], 32
ld2 {Vn.B16 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.B8 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.H8 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.H4 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.S4 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.S2 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.D2 * 2}, [Xm|SP], Xa (a is 0-30)
ld2 {Vn.B * 2}[uimm], [Xm|SP] (#uimm < 16)
ld2 {Vn.H * 2}[uimm], [Xm|SP] (#uimm < 8)
ld2 {Vn.S * 2}[uimm], [Xm|SP] (#uimm < 4)
ld2 {Vn.D * 2}[uimm], [Xm|SP] (#uimm < 2)
ld2 {Vn.B * 2}[uimm], [Xm|SP], 2 (#uimm < 16)
ld2 {Vn.B * 2}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
ld2 {Vn.H * 2}[uimm], [Xm|SP], 4 (#uimm < 8)
ld2 {Vn.H * 2}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
ld2 {Vn.S * 2}[uimm], [Xm|SP], 8 (#uimm < 4)
ld2 {Vn.S * 2}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
ld2 {Vn.D * 2}[uimm], [Xm|SP], 16 (#uimm < 2)
ld2 {Vn.D * 2}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
ld2r {Vn.B16 * 2}, [Xm|SP]
ld2r {Vn.B8 * 2}, [Xm|SP]
ld2r {Vn.H8 * 2}, [Xm|SP]
ld2r {Vn.H4 * 2}, [Xm|SP]
ld2r {Vn.S4 * 2}, [Xm|SP]
ld2r {Vn.S2 * 2}, [Xm|SP]
ld2r {Vn.D2 * 2}, [Xm|SP]
ld2r {Vn.D1 * 2}, [Xm|SP]
ld2r {Vn.B16 * 2}, [Xm|SP], 2
ld2r {Vn.B8 * 2}, [Xm|SP], 2
ld2r {Vn.H8 * 2}, [Xm|SP], 4
ld2r {Vn.H4 * 2}, [Xm|SP], 4
ld2r {Vn.S4 * 2}, [Xm|SP], 8
ld2r {Vn.S2 * 2}, [Xm|SP], 8
ld2r {Vn.D2 * 2}, [Xm|SP], 16
ld2r {Vn.D1 * 2}, [Xm|SP], 16
ld2r {Vn.B16 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.B8 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.H8 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.H4 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.S4 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.S2 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.D2 * 2}, [Xm|SP], Xa (a is 0-30)
ld2r {Vn.D1 * 2}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.B16 * 3}, [Xm|SP]
ld3 {Vn.B8 * 3}, [Xm|SP]
ld3 {Vn.H8 * 3}, [Xm|SP]
ld3 {Vn.H4 * 3}, [Xm|SP]
ld3 {Vn.S4 * 3}, [Xm|SP]
ld3 {Vn.S2 * 3}, [Xm|SP]
ld3 {Vn.D2 * 3}, [Xm|SP]
ld3 {Vn.B8 * 3}, [Xm|SP], 24
ld3 {Vn.H4 * 3}, [Xm|SP], 24
ld3 {Vn.S2 * 3}, [Xm|SP], 24
ld3 {Vn.B16 * 3}, [Xm|SP], 48
ld3 {Vn.H8 * 3}, [Xm|SP], 48
ld3 {Vn.S4 * 3}, [Xm|SP], 48
ld3 {Vn.D2 * 3}, [Xm|SP], 48
ld3 {Vn.B16 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.B8 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.H8 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.H4 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.S4 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.S2 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.D2 * 3}, [Xm|SP], Xa (a is 0-30)
ld3 {Vn.B * 3}[uimm], [Xm|SP] (#uimm < 16)
ld3 {Vn.H * 3}[uimm], [Xm|SP] (#uimm < 8)
ld3 {Vn.S * 3}[uimm], [Xm|SP] (#uimm < 4)
ld3 {Vn.D * 3}[uimm], [Xm|SP] (#uimm < 2)
ld3 {Vn.B * 3}[uimm], [Xm|SP], 3 (#uimm < 16)
ld3 {Vn.B * 3}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
ld3 {Vn.H * 3}[uimm], [Xm|SP], 6 (#uimm < 8)
ld3 {Vn.H * 3}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
ld3 {Vn.S * 3}[uimm], [Xm|SP], 12 (#uimm < 4)
ld3 {Vn.S * 3}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
ld3 {Vn.D * 3}[uimm], [Xm|SP], 24 (#uimm < 2)
ld3 {Vn.D * 3}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
ld3r {Vn.B16 * 3}, [Xm|SP]
ld3r {Vn.B8 * 3}, [Xm|SP]
ld3r {Vn.H8 * 3}, [Xm|SP]
ld3r {Vn.H4 * 3}, [Xm|SP]
ld3r {Vn.S4 * 3}, [Xm|SP]
ld3r {Vn.S2 * 3}, [Xm|SP]
ld3r {Vn.D2 * 3}, [Xm|SP]
ld3r {Vn.D1 * 3}, [Xm|SP]
ld3r {Vn.B16 * 3}, [Xm|SP], 3
ld3r {Vn.B8 * 3}, [Xm|SP], 3
ld3r {Vn.H8 * 3}, [Xm|SP], 6
ld3r {Vn.H4 * 3}, [Xm|SP], 6
ld3r {Vn.S4 * 3}, [Xm|SP], 12
ld3r {Vn.S2 * 3}, [Xm|SP], 12
ld3r {Vn.D2 * 3}, [Xm|SP], 24
ld3r {Vn.D1 * 3}, [Xm|SP], 24
ld3r {Vn.B16 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.B8 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.H8 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.H4 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.S4 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.S2 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.D2 * 3}, [Xm|SP], Xa (a is 0-30)
ld3r {Vn.D1 * 3}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.B16 * 4}, [Xm|SP]
ld4 {Vn.B8 * 4}, [Xm|SP]
ld4 {Vn.H8 * 4}, [Xm|SP]
ld4 {Vn.H4 * 4}, [Xm|SP]
ld4 {Vn.S4 * 4}, [Xm|SP]
ld4 {Vn.S2 * 4}, [Xm|SP]
ld4 {Vn.D2 * 4}, [Xm|SP]
ld4 {Vn.B8 * 4}, [Xm|SP], 32
ld4 {Vn.H4 * 4}, [Xm|SP], 32
ld4 {Vn.S2 * 4}, [Xm|SP], 32
ld4 {Vn.B16 * 4}, [Xm|SP], 64
ld4 {Vn.H8 * 4}, [Xm|SP], 64
ld4 {Vn.S4 * 4}, [Xm|SP], 64
ld4 {Vn.D2 * 4}, [Xm|SP], 64
ld4 {Vn.B16 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.B8 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.H8 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.H4 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.S4 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.S2 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.D2 * 4}, [Xm|SP], Xa (a is 0-30)
ld4 {Vn.B * 4}[uimm], [Xm|SP] (#uimm < 16)
ld4 {Vn.H * 4}[uimm], [Xm|SP] (#uimm < 8)
ld4 {Vn.S * 4}[uimm], [Xm|SP] (#uimm < 4)
ld4 {Vn.D * 4}[uimm], [Xm|SP] (#uimm < 2)
ld4 {Vn.B * 4}[uimm], [Xm|SP], 4 (#uimm < 16)
ld4 {Vn.B * 4}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
ld4 {Vn.H * 4}[uimm], [Xm|SP], 8 (#uimm < 8)
ld4 {Vn.H * 4}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
ld4 {Vn.S * 4}[uimm], [Xm|SP], 16 (#uimm < 4)
ld4 {Vn.S * 4}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
ld4 {Vn.D * 4}[uimm], [Xm|SP], 32 (#uimm < 2)
ld4 {Vn.D * 4}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
ld4r {Vn.B16 * 4}, [Xm|SP]
ld4r {Vn.B8 * 4}, [Xm|SP]
ld4r {Vn.H8 * 4}, [Xm|SP]
ld4r {Vn.H4 * 4}, [Xm|SP]
ld4r {Vn.S4 * 4}, [Xm|SP]
ld4r {Vn.S2 * 4}, [Xm|SP]
ld4r {Vn.D2 * 4}, [Xm|SP]
ld4r {Vn.D1 * 4}, [Xm|SP]
ld4r {Vn.B16 * 4}, [Xm|SP], 4
ld4r {Vn.B8 * 4}, [Xm|SP], 4
ld4r {Vn.H8 * 4}, [Xm|SP], 8
ld4r {Vn.H4 * 4}, [Xm|SP], 8
ld4r {Vn.S4 * 4}, [Xm|SP], 16
ld4r {Vn.S2 * 4}, [Xm|SP], 16
ld4r {Vn.D2 * 4}, [Xm|SP], 32
ld4r {Vn.D1 * 4}, [Xm|SP], 32
ld4r {Vn.B16 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.B8 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.H8 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.H4 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.S4 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.S2 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.D2 * 4}, [Xm|SP], Xa (a is 0-30)
ld4r {Vn.D1 * 4}, [Xm|SP], Xa (a is 0-30)
ldadd Wn, Wm, [Xa|SP]
ldadd Xn, Xm, [Xa|SP]
ldadda Wn, Wm, [Xa|SP]
ldadda Xn, Xm, [Xa|SP]
ldaddal Wn, Wm, [Xa|SP]
ldaddal Xn, Xm, [Xa|SP]
ldaddl Wn, Wm, [Xa|SP]
ldaddl Xn, Xm, [Xa|SP]
ldapr Wn, [Xm|SP]
ldapr Xn, [Xm|SP]
ldapur Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapur Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapurb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapurh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapursb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapursb Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapursh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapursh Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldapursw Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldar Wn, [Xm|SP]
ldar Xn, [Xm|SP]
ldaxp Wn, Wm, [Xa|SP]
ldaxp Xn, Xm, [Xa|SP]
ldaxr Wn, [Xm|SP]
ldaxr Xn, [Xm|SP]
ldclr Wn, Wm, [Xa|SP]
ldclr Xn, Xm, [Xa|SP]
ldclra Wn, Wm, [Xa|SP]
ldclra Xn, Xm, [Xa|SP]
ldclral Wn, Wm, [Xa|SP]
ldclral Xn, Xm, [Xa|SP]
ldclrl Wn, Wm, [Xa|SP]
ldclrl Xn, Xm, [Xa|SP]
ldeor Wn, Wm, [Xa|SP]
ldeor Xn, Xm, [Xa|SP]
ldeora Wn, Wm, [Xa|SP]
ldeora Xn, Xm, [Xa|SP]
ldeoral Wn, Wm, [Xa|SP]
ldeoral Xn, Xm, [Xa|SP]
ldeorl Wn, Wm, [Xa|SP]
ldeorl Xn, Xm, [Xa|SP]
ldlar Wn, [Xm|SP]
ldlar Xn, [Xm|SP]
ldnp Sn, Sm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
ldnp Dn, Dm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
ldnp Qn, Qm, [Xa|SP {, #simm } ] (-1024 <= #simm < 1024, #simm = 16 * N)
ldnp Wn, Wm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
ldnp Xn, Xm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
ldp Sn, Sm, [Xa|SP], #simm (-256 <= #simm < 256, #simm = 4 * N)
ldp Dn, Dm, [Xa|SP], #simm (-512 <= #simm < 512, #simm = 8 * N)
ldp Qn, Qm, [Xa|SP], #simm (-1024 <= #simm < 1024, #simm = 16 * N)
ldp Sn, Sm, [Xa|SP, #simm]! (-256 <= #simm < 256, #simm = 4 * N)
ldp Dn, Dm, [Xa|SP, #simm]! (-512 <= #simm < 512, #simm = 8 * N)
ldp Qn, Qm, [Xa|SP, #simm]! (-1024 <= #simm < 1024, #simm = 16 * N)
ldp Sn, Sm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
ldp Dn, Dm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
ldp Qn, Qm, [Xa|SP {, #simm } ] (-1024 <= #simm < 1024, #simm = 16 * N)
ldp Wn, Wm, [Xa|SP], #simm (-256 <= #simm < 256, #simm = 4 * N)
ldp Xn, Xm, [Xa|SP], #simm (-512 <= #simm < 512, #simm = 8 * N)
ldp Wn, Wm, [Xa|SP, #simm]! (-256 <= #simm < 256, #simm = 4 * N)
ldp Xn, Xm, [Xa|SP, #simm]! (-512 <= #simm < 512, #simm = 8 * N)
ldp Wn, Wm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
ldp Xn, Xm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
ldpsw Xn, Xm, [Xa|SP], #simm (-256 <= #simm < 256, #simm = 4 * N)
ldpsw Xn, Xm, [Xa|SP, #simm]! (-256 <= #simm < 256, #simm = 4 * N)
ldpsw Xn, Xm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
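
ldp shows the three addressing modes used throughout this list: post-index ([Xa|SP], #simm accesses first, then adds #simm to the base), pre-index ([Xa|SP, #simm]! adds first, then accesses) and plain signed offset (base unchanged). The classic epilogue, for instance:

    ldp x29, x30, [sp], #16  // restore fp/lr, then sp += 16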
ldr Bn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Hn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Sn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Dn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Qn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Bn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Hn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Sn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Dn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Qn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Bn, [Xm|SP {, #uimm } ] (#uimm < 4096)
ldr Hn, [Xm|SP {, #uimm } ] (#uimm < 8192, #uimm = 2 * N)
ldr Sn, [Xm|SP {, #uimm } ] (#uimm < 16384, #uimm = 4 * N)
ldr Dn, [Xm|SP {, #uimm } ] (#uimm < 32768, #uimm = 8 * N)
ldr Qn, [Xm|SP {, #uimm } ] (#uimm < 65536, #uimm = 16 * N)
ldr Wn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Xn, [Xm|SP], #simm (-256 <= #simm < 256)
ldr Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Xn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldr Wn, [Xm|SP {, #uimm } ] (#uimm < 16384, #uimm = 4 * N)
ldr Xn, [Xm|SP {, #uimm } ] (#uimm < 32768, #uimm = 8 * N)
ldr Sn, <offset> (offset is 19 bit, 4-byte aligned)
ldr Dn, <offset> (offset is 19 bit, 4-byte aligned)
ldr Qn, <offset> (offset is 19 bit, 4-byte aligned)
ldr Wn, <offset> (offset is 19 bit, 4-byte aligned)
ldr Xn, <offset> (offset is 19 bit, 4-byte aligned)
ldr Bn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 0])
ldr Hn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 1])
ldr Sn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 2])
ldr Dn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 3])
ldr Qn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 4])
ldr Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 2])
ldr Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 3])
ldraa Xn, [Xm|SP {, #simm } ] (-4096 <= #simm < 4096, #simm = 8 * N)
ldraa Xn, [Xm|SP, #simm]! (-4096 <= #simm < 4096, #simm = 8 * N)
ldrab Xn, [Xm|SP {, #simm } ] (-4096 <= #simm < 4096, #simm = 8 * N)
ldrab Xn, [Xm|SP, #simm]! (-4096 <= #simm < 4096, #simm = 8 * N)
ldrb Wn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrb Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrb Wn, [Xm|SP {, #uimm } ] (#uimm < 4096)
ldrb Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 0])
ldrh Wn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrh Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrh Wn, [Xm|SP {, #uimm } ] (#uimm < 8192, #uimm = 2 * N)
ldrh Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 1])
ldrsb Wn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrsb Xn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrsb Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrsb Xn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrsb Wn, [Xm|SP {, #uimm } ] (#uimm < 4096)
ldrsb Xn, [Xm|SP {, #uimm } ] (#uimm < 4096)
ldrsb Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 0])
ldrsb Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 0])
ldrsh Wn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrsh Xn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrsh Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrsh Xn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrsh Wn, [Xm|SP {, #uimm } ] (#uimm < 8192, #uimm = 2 * N)
ldrsh Xn, [Xm|SP {, #uimm } ] (#uimm < 8192, #uimm = 2 * N)
ldrsh Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 1])
ldrsh Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 1])
ldrsw Xn, [Xm|SP], #simm (-256 <= #simm < 256)
ldrsw Xn, [Xm|SP, #simm]! (-256 <= #simm < 256)
ldrsw Xn, [Xm|SP {, #uimm } ] (#uimm < 16384, #uimm = 4 * N)
ldrsw Xn, <offset> (offset is 19 bit, 4-byte aligned)
ldrsw Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 2])
ldset Wn, Wm, [Xa|SP]
ldset Xn, Xm, [Xa|SP]
ldseta Wn, Wm, [Xa|SP]
ldseta Xn, Xm, [Xa|SP]
ldsetal Wn, Wm, [Xa|SP]
ldsetal Xn, Xm, [Xa|SP]
ldsetl Wn, Wm, [Xa|SP]
ldsetl Xn, Xm, [Xa|SP]
ldsmax Wn, Wm, [Xa|SP]
ldsmax Xn, Xm, [Xa|SP]
ldsmaxa Wn, Wm, [Xa|SP]
ldsmaxa Xn, Xm, [Xa|SP]
ldsmaxal Wn, Wm, [Xa|SP]
ldsmaxal Xn, Xm, [Xa|SP]
ldsmaxalb Wn, Wm, [Xa|SP]
ldsmaxalh Wn, Wm, [Xa|SP]
ldsmaxl Wn, Wm, [Xa|SP]
ldsmaxl Xn, Xm, [Xa|SP]
ldsmin Wn, Wm, [Xa|SP]
ldsmin Xn, Xm, [Xa|SP]
ldsmina Wn, Wm, [Xa|SP]
ldsmina Xn, Xm, [Xa|SP]
ldsminal Wn, Wm, [Xa|SP]
ldsminal Xn, Xm, [Xa|SP]
ldsminalb Wn, Wm, [Xa|SP]
ldsminalh Wn, Wm, [Xa|SP]
ldsminl Wn, Wm, [Xa|SP]
ldsminl Xn, Xm, [Xa|SP]
ldtr Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtr Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrsb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrsb Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrsh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrsh Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldtrsw Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldumax Wn, Wm, [Xa|SP]
ldumax Xn, Xm, [Xa|SP]
ldumaxa Wn, Wm, [Xa|SP]
ldumaxa Xn, Xm, [Xa|SP]
ldumaxal Wn, Wm, [Xa|SP]
ldumaxal Xn, Xm, [Xa|SP]
ldumaxalb Wn, Wm, [Xa|SP]
ldumaxalh Wn, Wm, [Xa|SP]
ldumaxl Wn, Wm, [Xa|SP]
ldumaxl Xn, Xm, [Xa|SP]
ldumin Wn, Wm, [Xa|SP]
ldumin Xn, Xm, [Xa|SP]
ldumina Wn, Wm, [Xa|SP]
ldumina Xn, Xm, [Xa|SP]
lduminal Wn, Wm, [Xa|SP]
lduminal Xn, Xm, [Xa|SP]
lduminalb Wn, Wm, [Xa|SP]
lduminalh Wn, Wm, [Xa|SP]
lduminl Wn, Wm, [Xa|SP]
lduminl Xn, Xm, [Xa|SP]
ldur Bn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldur Hn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldur Sn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldur Dn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldur Qn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldur Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldur Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldurb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldurh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldursb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldursb Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldursh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldursh Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldursw Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
ldxp Wn, Wm, [Xa|SP]
ldxp Xn, Xm, [Xa|SP]
ldxr Wn, [Xm|SP]
ldxr Xn, [Xm|SP]
lsl Wn, Wm, Wa
lsl Xn, Xm, Xa
lsl Wn, Wm, #uimm (0 <= #uimm < 32)
lsl Xn, Xm, #uimm (0 <= #uimm < 64)
lslv Wn, Wm, Wa
lslv Xn, Xm, Xa
lsr Wn, Wm, Wa
lsr Xn, Xm, Xa
lsr Wn, Wm, #uimm (#uimm < 32)
lsr Xn, Xm, #uimm (#uimm < 64)
lsrv Wn, Wm, Wa
lsrv Xn, Xm, Xa
madd Wn, Wm, Wa, Wb
madd Xn, Xm, Xa, Xb
mla Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
mla Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
mla Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
mla Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
mla Vn.B16, Vm.B16, Va.B16
mla Vn.B8, Vm.B8, Va.B8
mla Vn.H8, Vm.H8, Va.H8
mla Vn.H4, Vm.H4, Va.H4
mla Vn.S4, Vm.S4, Va.S4
mla Vn.S2, Vm.S2, Va.S2
mls Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
mls Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
mls Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
mls Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
mls Vn.B16, Vm.B16, Va.B16
mls Vn.B8, Vm.B8, Va.B8
mls Vn.H8, Vm.H8, Va.H8
mls Vn.H4, Vm.H4, Va.H4
mls Vn.S4, Vm.S4, Va.S4
mls Vn.S2, Vm.S2, Va.S2
mneg Wn, Wm, Wa
mneg Xn, Xm, Xa
mov Wn, Wm
mov Xn, Xm
mov Wn|WSP, Wm|WSP
mov Xn|SP, Xm|SP
mov Bn, Vm.B[uimm] (#uimm < 16)
mov Hn, Vm.H[uimm] (#uimm < 8)
mov Sn, Vm.S[uimm] (#uimm < 4)
mov Dn, Vm.D[uimm] (#uimm < 2)
mov Vn.B[uimm], Vm.B[uimm1] (#uimm < 16, #uimm1 < 16)
mov Vn.H[uimm], Vm.H[uimm1] (#uimm < 8, #uimm1 < 8)
mov Vn.S[uimm], Vm.S[uimm1] (#uimm < 4, #uimm1 < 4)
mov Vn.D[uimm], Vm.D[uimm1] (#uimm < 2, #uimm1 < 2)
mov Vn.B[uimm], Wm (#uimm < 16)
mov Vn.H[uimm], Wm (#uimm < 8)
mov Vn.S[uimm], Wm (#uimm < 4)
mov Vn.D[uimm], Xm (#uimm < 2)
mov.inverted Wn, #imm (#imm is a wide immediate)
mov.inverted Xn, #imm (#imm is a wide immediate)
mov Wn, #imm (#imm is a wide immediate)
mov Xn, #imm (#imm is a wide immediate)
mov Vn.B16, Vm.B16
mov Vn.B8, Vm.B8
mov.logical Wn|WSP, #imm (#imm is a logical immediate)
mov.logical Xn|SP, #imm (#imm is a logical immediate)
mov Wn, Vm.S[uimm] (#uimm < 4)
mov Xn, Vm.D[uimm] (#uimm < 2)
movi Vn.B16, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 < 1)
movi Vn.B8, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 < 1)
movi Vn.H8, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
movi Vn.H4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
movi Vn.S4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
movi Vn.S2, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
movi Vn.S4, #uimm, MSL #uimm1 (#uimm < 256, #uimm1 = [8, 16])
movi Vn.S2, #uimm, MSL #uimm1 (#uimm < 256, #uimm1 = [8, 16])
movi Dn, #imm (#imm is a stretched immediate)
movi Vn.D2, #imm (#imm is a stretched immediate)
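
A stretched immediate is the 64-bit pattern obtained by widening each bit of an 8-bit value to a full byte of 0x00 or 0xFF. For example, from 0b10101010:

    movi v0.2d, #0xFF00FF00FF00FF00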
movk Wn, #uimm {, LSL #uimm1 } (#uimm < 65536, #uimm1 = [0, 16])
movk Xn, #uimm {, LSL #uimm1 } (#uimm < 65536, #uimm1 = [0, 16, 32, 48])
movn Wn, #uimm {, LSL #uimm1 } (#uimm < 65536, #uimm1 = [0, 16])
movn Xn, #uimm {, LSL #uimm1 } (#uimm < 65536, #uimm1 = [0, 16, 32, 48])
movz Wn, #uimm {, LSL #uimm1 } (#uimm < 65536, #uimm1 = [0, 16])
movz Xn, #uimm {, LSL #uimm1 } (#uimm < 65536, #uimm1 = [0, 16, 32, 48])
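
movz/movk materialize arbitrary 64-bit constants 16 bits at a time; for constants that fit a single movz, movn or logical-immediate encoding, the mov aliases above pick the short form instead. Loading 0x123456789ABCDEF0:

    movz x0, #0x1234, LSL #48  // x0 = 0x1234000000000000
    movk x0, #0x5678, LSL #32  // keep other bits, insert 0x5678
    movk x0, #0x9ABC, LSL #16
    movk x0, #0xDEF0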
mrs Xn, #uimm (#uimm < 32768)
msr msr_imm_op, #uimm (#uimm < 16)
msr #uimm, Xn (#uimm < 32768)
msub Wn, Wm, Wa, Wb
msub Xn, Xm, Xa, Xb
mul Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
mul Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
mul Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
mul Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
mul Vn.B16, Vm.B16, Va.B16
mul Vn.B8, Vm.B8, Va.B8
mul Vn.H8, Vm.H8, Va.H8
mul Vn.H4, Vm.H4, Va.H4
mul Vn.S4, Vm.S4, Va.S4
mul Vn.S2, Vm.S2, Va.S2
mul Wn, Wm, Wa
mul Xn, Xm, Xa
mvn Wn, Wm {, LSL|LSR|ASR #uimm } (#uimm < 32)
mvn Xn, Xm {, LSL|LSR|ASR #uimm } (#uimm < 64)
mvn Vn.B16, Vm.B16
mvn Vn.B8, Vm.B8
mvni Vn.H8, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
mvni Vn.H4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
mvni Vn.S4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
mvni Vn.S2, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
mvni Vn.S4, #uimm, MSL #uimm1 (#uimm < 256, #uimm1 = [8, 16])
mvni Vn.S2, #uimm, MSL #uimm1 (#uimm < 256, #uimm1 = [8, 16])
neg Wn, Wm {, LSL|LSR|ASR #uimm } (#uimm < 32)
neg Xn, Xm {, LSL|LSR|ASR #uimm } (#uimm < 64)
neg Dn, Dm
neg Vn.B16, Vm.B16
neg Vn.B8, Vm.B8
neg Vn.H8, Vm.H8
neg Vn.H4, Vm.H4
neg Vn.S4, Vm.S4
neg Vn.S2, Vm.S2
neg Vn.D2, Vm.D2
negs Wn, Wm {, LSL|LSR|ASR #uimm } (#uimm < 32)
negs Xn, Xm {, LSL|LSR|ASR #uimm } (#uimm < 64)
not Vn.B16, Vm.B16
not Vn.B8, Vm.B8
orn Vn.B16, Vm.B16, Va.B16
orn Vn.B8, Vm.B8, Va.B8
orn Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
orn Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
orr Vn.H8, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
orr Vn.H4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8])
orr Vn.S4, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
orr Vn.S2, #uimm {, LSL #uimm1 } (#uimm < 256, #uimm1 = [0, 8, 16, 24])
orr Vn.B16, Vm.B16, Va.B16
orr Vn.B8, Vm.B8, Va.B8
orr Wn|WSP, Wm, #imm (#imm is a logical immediate)
orr Xn|SP, Xm, #imm (#imm is a logical immediate)
orr Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
orr Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
pmul Vn.B16, Vm.B16, Va.B16
pmul Vn.B8, Vm.B8, Va.B8
pmull Vn.H8, Vm.B8, Va.B8
pmull Vn.Q1, Vm.D1, Va.D1
pmull2 Vn.H8, Vm.B16, Va.B16
pmull2 Vn.Q1, Vm.D2, Va.D2
prfm #uimm, <offset> (#uimm < 32, offset is 19 bit, 4-byte aligned)
prfm #uimm, [Xn|SP, Wm|Xm { , UXTW|LSL|SXTW|SXTX { #uimm1 } } ] (#uimm < 32, #uimm1 = [0, 3])
prfum #uimm, [Xn|SP {, #simm1 } ] (#uimm < 32, -256 <= #simm1 < 256)
raddhn Vn.B8, Vm.H8, Va.H8
raddhn Vn.H4, Vm.S4, Va.S4
raddhn Vn.S2, Vm.D2, Va.D2
raddhn2 Vn.B16, Vm.H8, Va.H8
raddhn2 Vn.H8, Vm.S4, Va.S4
raddhn2 Vn.S4, Vm.D2, Va.D2
rbit Vn.B16, Vm.B16
rbit Vn.B8, Vm.B8
rbit Wn, Wm
rbit Xn, Xm
rev16 Vn.B16, Vm.B16
rev16 Vn.B8, Vm.B8
rev16 Wn, Wm
rev16 Xn, Xm
rev32 Vn.B16, Vm.B16
rev32 Vn.B8, Vm.B8
rev32 Vn.H8, Vm.H8
rev32 Vn.H4, Vm.H4
rev32 Xn, Xm
rev64 Vn.B16, Vm.B16
rev64 Vn.B8, Vm.B8
rev64 Vn.H8, Vm.H8
rev64 Vn.H4, Vm.H4
rev64 Vn.S4, Vm.S4
rev64 Vn.S2, Vm.S2
rev64 Xn, Xm
rmif Xn, #uimm, #uimm1 (#uimm < 64, #uimm1 < 16)
ror Wn, Wm, #uimm (#uimm < 32)
ror Xn, Xm, #uimm (#uimm < 64)
ror Wn, Wm, Wa
ror Xn, Xm, Xa
rorv Wn, Wm, Wa
rorv Xn, Xm, Xa
rshrn Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
rshrn Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
rshrn Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
rshrn2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
rshrn2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
rshrn2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
rsubhn Vn.B8, Vm.H8, Va.H8
rsubhn Vn.H4, Vm.S4, Va.S4
rsubhn Vn.S2, Vm.D2, Va.D2
rsubhn2 Vn.B16, Vm.H8, Va.H8
rsubhn2 Vn.H8, Vm.S4, Va.S4
rsubhn2 Vn.S4, Vm.D2, Va.D2
saba Vn.B16, Vm.B16, Va.B16
saba Vn.B8, Vm.B8, Va.B8
saba Vn.H8, Vm.H8, Va.H8
saba Vn.H4, Vm.H4, Va.H4
saba Vn.S4, Vm.S4, Va.S4
saba Vn.S2, Vm.S2, Va.S2
sabal Vn.H8, Vm.B8, Va.B8
sabal Vn.S4, Vm.H4, Va.H4
sabal Vn.D2, Vm.S2, Va.S2
sabal2 Vn.H8, Vm.B16, Va.B16
sabal2 Vn.S4, Vm.H8, Va.H8
sabal2 Vn.D2, Vm.S4, Va.S4
sabd Vn.B16, Vm.B16, Va.B16
sabd Vn.B8, Vm.B8, Va.B8
sabd Vn.H8, Vm.H8, Va.H8
sabd Vn.H4, Vm.H4, Va.H4
sabd Vn.S4, Vm.S4, Va.S4
sabd Vn.S2, Vm.S2, Va.S2
sabdl Vn.H8, Vm.B8, Va.B8
sabdl Vn.S4, Vm.H4, Va.H4
sabdl Vn.D2, Vm.S2, Va.S2
sabdl2 Vn.H8, Vm.B16, Va.B16
sabdl2 Vn.S4, Vm.H8, Va.H8
sabdl2 Vn.D2, Vm.S4, Va.S4
sadalp Vn.H8, Vm.B16
sadalp Vn.H4, Vm.B8
sadalp Vn.S4, Vm.H8
sadalp Vn.S2, Vm.H4
sadalp Vn.D2, Vm.S4
sadalp Vn.D1, Vm.S2
saddl Vn.H8, Vm.B8, Va.B8
saddl Vn.S4, Vm.H4, Va.H4
saddl Vn.D2, Vm.S2, Va.S2
saddl2 Vn.H8, Vm.B16, Va.B16
saddl2 Vn.S4, Vm.H8, Va.H8
saddl2 Vn.D2, Vm.S4, Va.S4
saddlp Vn.H8, Vm.B16
saddlp Vn.H4, Vm.B8
saddlp Vn.S4, Vm.H8
saddlp Vn.S2, Vm.H4
saddlp Vn.D2, Vm.S4
saddlp Vn.D1, Vm.S2
saddlv Hn, Vm.B16
saddlv Hn, Vm.B8
saddlv Sn, Vm.H8
saddlv Sn, Vm.H4
saddlv Dn, Vm.S4
saddw Vn.H8, Vm.H8, Va.B8
saddw Vn.S4, Vm.S4, Va.H4
saddw Vn.D2, Vm.D2, Va.S2
saddw2 Vn.H8, Vm.H8, Va.B16
saddw2 Vn.S4, Vm.S4, Va.H8
saddw2 Vn.D2, Vm.D2, Va.S4
sbc Wn, Wm, Wa
sbc Xn, Xm, Xa
sbcs Wn, Wm, Wa
sbcs Xn, Xm, Xa
sbfiz Wn, Wm, #uimm, #uimm1 (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
sbfiz Xn, Xm, #uimm, #uimm1 (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)
sbfm Wn, Wm, #uimm, #uimm1 (#uimm < 32, #uimm1 < 32)
sbfm Xn, Xm, #uimm, #uimm1 (#uimm < 64, #uimm1 < 64)
sbfx Wn, Wm, #uimm, #uimm1 (#uimm < 32, 1 <= #uimm1 <= 32 - uimm)
sbfx Xn, Xm, #uimm, #uimm1 (#uimm < 64, 1 <= #uimm1 <= 64 - uimm)
scvtf Hn, Hm, #uimm (1 <= #uimm <= 16)
scvtf Sn, Sm, #uimm (1 <= #uimm <= 32)
scvtf Dn, Dm, #uimm (1 <= #uimm <= 64)
scvtf Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
scvtf Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
scvtf Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
scvtf Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
scvtf Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
scvtf Hn, Hm
scvtf Sn, Sm
scvtf Dn, Dm
scvtf Vn.H8, Vm.H8
scvtf Vn.H4, Vm.H4
scvtf Vn.S4, Vm.S4
scvtf Vn.S2, Vm.S2
scvtf Vn.D2, Vm.D2
scvtf Hn, Wm, #uimm (1 <= #uimm <= 32)
scvtf Sn, Wm, #uimm (1 <= #uimm <= 32)
scvtf Dn, Wm, #uimm (1 <= #uimm <= 32)
scvtf Hn, Xm, #uimm (1 <= #uimm <= 64)
scvtf Sn, Xm, #uimm (1 <= #uimm <= 64)
scvtf Dn, Xm, #uimm (1 <= #uimm <= 64)
scvtf Hn, Wm
scvtf Sn, Wm
scvtf Dn, Wm
scvtf Hn, Xm
scvtf Sn, Xm
scvtf Dn, Xm
sdiv Wn, Wm, Wa
sdiv Xn, Xm, Xa
sdot Vn.S2, Vm.B8, Va.B4[uimm] (#uimm < 4)
sdot Vn.S4, Vm.B16, Va.B4[uimm] (#uimm < 4)
sdot Vn.S2, Vm.B8, Va.B8
sdot Vn.S4, Vm.B16, Va.B16
sha1su0 Vn.S4, Vm.S4, Va.S4
sha256su1 Vn.S4, Vm.S4, Va.S4
sha512su1 Vn.D2, Vm.D2, Va.D2
shadd Vn.B16, Vm.B16, Va.B16
shadd Vn.B8, Vm.B8, Va.B8
shadd Vn.H8, Vm.H8, Va.H8
shadd Vn.H4, Vm.H4, Va.H4
shadd Vn.S4, Vm.S4, Va.S4
shadd Vn.S2, Vm.S2, Va.S2
shl Dn, Dm, #uimm (#uimm < 64)
shl Vn.B16, Vm.B16, #uimm (#uimm < 8)
shl Vn.B8, Vm.B8, #uimm (#uimm < 8)
shl Vn.H8, Vm.H8, #uimm (#uimm < 16)
shl Vn.H4, Vm.H4, #uimm (#uimm < 16)
shl Vn.S4, Vm.S4, #uimm (#uimm < 32)
shl Vn.S2, Vm.S2, #uimm (#uimm < 32)
shl Vn.D2, Vm.D2, #uimm (#uimm < 64)
shll Vn.H8, Vm.B8, 8
shll Vn.S4, Vm.H4, 16
shll Vn.D2, Vm.S2, 32
shll2 Vn.H8, Vm.B16, 8
shll2 Vn.S4, Vm.H8, 16
shll2 Vn.D2, Vm.S4, 32
shrn Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
shrn Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
shrn Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
shrn2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
shrn2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
shrn2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
shsub Vn.B16, Vm.B16, Va.B16
shsub Vn.B8, Vm.B8, Va.B8
shsub Vn.H8, Vm.H8, Va.H8
shsub Vn.H4, Vm.H4, Va.H4
shsub Vn.S4, Vm.S4, Va.S4
shsub Vn.S2, Vm.S2, Va.S2
sli Dn, Dm, #uimm (#uimm < 64)
sli Vn.B16, Vm.B16, #uimm (#uimm < 8)
sli Vn.B8, Vm.B8, #uimm (#uimm < 8)
sli Vn.H8, Vm.H8, #uimm (#uimm < 16)
sli Vn.H4, Vm.H4, #uimm (#uimm < 16)
sli Vn.S4, Vm.S4, #uimm (#uimm < 32)
sli Vn.S2, Vm.S2, #uimm (#uimm < 32)
sli Vn.D2, Vm.D2, #uimm (#uimm < 64)
sm3partw1 Vn.S4, Vm.S4, Va.S4
sm3partw2 Vn.S4, Vm.S4, Va.S4
sm3ss1 Vn.S4, Vm.S4, Va.S4, Vb.S4
sm3tt1a Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sm3tt1b Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sm3tt2a Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sm3tt2b Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sm4ekey Vn.S4, Vm.S4, Va.S4
smax Vn.B16, Vm.B16, Va.B16
smax Vn.B8, Vm.B8, Va.B8
smax Vn.H8, Vm.H8, Va.H8
smax Vn.H4, Vm.H4, Va.H4
smax Vn.S4, Vm.S4, Va.S4
smax Vn.S2, Vm.S2, Va.S2
smaxp Vn.B16, Vm.B16, Va.B16
smaxp Vn.B8, Vm.B8, Va.B8
smaxp Vn.H8, Vm.H8, Va.H8
smaxp Vn.H4, Vm.H4, Va.H4
smaxp Vn.S4, Vm.S4, Va.S4
smaxp Vn.S2, Vm.S2, Va.S2
smaxv Bn, Vm.B16
smaxv Bn, Vm.B8
smaxv Hn, Vm.H8
smaxv Hn, Vm.H4
smaxv Sn, Vm.S4
smc #uimm (#uimm < 65536)
smin Vn.B16, Vm.B16, Va.B16
smin Vn.B8, Vm.B8, Va.B8
smin Vn.H8, Vm.H8, Va.H8
smin Vn.H4, Vm.H4, Va.H4
smin Vn.S4, Vm.S4, Va.S4
smin Vn.S2, Vm.S2, Va.S2
sminp Vn.B16, Vm.B16, Va.B16
sminp Vn.B8, Vm.B8, Va.B8
sminp Vn.H8, Vm.H8, Va.H8
sminp Vn.H4, Vm.H4, Va.H4
sminp Vn.S4, Vm.S4, Va.S4
sminp Vn.S2, Vm.S2, Va.S2
sminv Bn, Vm.B16
sminv Bn, Vm.B8
sminv Hn, Vm.H8
sminv Hn, Vm.H4
sminv Sn, Vm.S4
smlal Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
smlal Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
smlal Vn.H8, Vm.B8, Va.B8
smlal Vn.S4, Vm.H4, Va.H4
smlal Vn.D2, Vm.S2, Va.S2
smlal2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
smlal2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
smlal2 Vn.H8, Vm.B16, Va.B16
smlal2 Vn.S4, Vm.H8, Va.H8
smlal2 Vn.D2, Vm.S4, Va.S4
smlsl Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
smlsl Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
smlsl Vn.H8, Vm.B8, Va.B8
smlsl Vn.S4, Vm.H4, Va.H4
smlsl Vn.D2, Vm.S2, Va.S2
smlsl2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
smlsl2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
smlsl2 Vn.H8, Vm.B16, Va.B16
smlsl2 Vn.S4, Vm.H8, Va.H8
smlsl2 Vn.D2, Vm.S4, Va.S4
smov Wn, Vm.B[uimm] (#uimm < 16)
smov Wn, Vm.H[uimm] (#uimm < 8)
smov Xn, Vm.B[uimm] (#uimm < 16)
smov Xn, Vm.H[uimm] (#uimm < 8)
smov Xn, Vm.S[uimm] (#uimm < 4)
smull Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
smull Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
smull Vn.H8, Vm.B8, Va.B8
smull Vn.S4, Vm.H4, Va.H4
smull Vn.D2, Vm.S2, Va.S2
smull Xn, Wm, Wa
smull2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
smull2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
smull2 Vn.H8, Vm.B16, Va.B16
smull2 Vn.S4, Vm.H8, Va.H8
smull2 Vn.D2, Vm.S4, Va.S4
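
Example (register choices are arbitrary): the vector forms widen the element size, while the scalar form produces a full 64-bit product.

    smull  V0.S4, V1.H4, V2.H4   // four signed H*H products widened to S lanes
    smull2 V3.S4, V1.H8, V2.H8   // the same for the upper four H lanes
    smull  X0, W1, W2            // scalar: signed 32x32 -> 64-bit result
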
sqabs Bn, Bm
sqabs Hn, Hm
sqabs Sn, Sm
sqabs Dn, Dm
sqabs Vn.B16, Vm.B16
sqabs Vn.B8, Vm.B8
sqabs Vn.H8, Vm.H8
sqabs Vn.H4, Vm.H4
sqabs Vn.S4, Vm.S4
sqabs Vn.S2, Vm.S2
sqabs Vn.D2, Vm.D2
sqadd Bn, Bm, Ba
sqadd Hn, Hm, Ha
sqadd Sn, Sm, Sa
sqadd Dn, Dm, Da
sqadd Vn.B16, Vm.B16, Va.B16
sqadd Vn.B8, Vm.B8, Va.B8
sqadd Vn.H8, Vm.H8, Va.H8
sqadd Vn.H4, Vm.H4, Va.H4
sqadd Vn.S4, Vm.S4, Va.S4
sqadd Vn.S2, Vm.S2, Va.S2
sqadd Vn.D2, Vm.D2, Va.D2
sqdmlal Sn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmlal Dn, Sm, Va.S[uimm] (#uimm < 4)
sqdmlal Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmlal Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqdmlal Sn, Hm, Ha
sqdmlal Dn, Sm, Sa
sqdmlal Vn.S4, Vm.H4, Va.H4
sqdmlal Vn.D2, Vm.S2, Va.S2
sqdmlal2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmlal2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
sqdmlal2 Vn.S4, Vm.H8, Va.H8
sqdmlal2 Vn.D2, Vm.S4, Va.S4
sqdmlsl Sn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmlsl Dn, Sm, Va.S[uimm] (#uimm < 4)
sqdmlsl Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmlsl Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqdmlsl Sn, Hm, Ha
sqdmlsl Dn, Sm, Sa
sqdmlsl Vn.S4, Vm.H4, Va.H4
sqdmlsl Vn.D2, Vm.S2, Va.S2
sqdmlsl2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmlsl2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
sqdmlsl2 Vn.S4, Vm.H8, Va.H8
sqdmlsl2 Vn.D2, Vm.S4, Va.S4
sqdmulh Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmulh Sn, Sm, Va.S[uimm] (#uimm < 4)
sqdmulh Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmulh Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmulh Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sqdmulh Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqdmulh Hn, Hm, Ha
sqdmulh Sn, Sm, Sa
sqdmulh Vn.H8, Vm.H8, Va.H8
sqdmulh Vn.H4, Vm.H4, Va.H4
sqdmulh Vn.S4, Vm.S4, Va.S4
sqdmulh Vn.S2, Vm.S2, Va.S2
sqdmull Sn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmull Dn, Sm, Va.S[uimm] (#uimm < 4)
sqdmull Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmull Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqdmull Sn, Hm, Ha
sqdmull Dn, Sm, Sa
sqdmull Vn.S4, Vm.H4, Va.H4
sqdmull Vn.D2, Vm.S2, Va.S2
sqdmull2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqdmull2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
sqdmull2 Vn.S4, Vm.H8, Va.H8
sqdmull2 Vn.D2, Vm.S4, Va.S4
sqneg Bn, Bm
sqneg Hn, Hm
sqneg Sn, Sm
sqneg Dn, Dm
sqneg Vn.B16, Vm.B16
sqneg Vn.B8, Vm.B8
sqneg Vn.H8, Vm.H8
sqneg Vn.H4, Vm.H4
sqneg Vn.S4, Vm.S4
sqneg Vn.S2, Vm.S2
sqneg Vn.D2, Vm.D2
sqrdmlah Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmlah Sn, Sm, Va.S[uimm] (#uimm < 4)
sqrdmlah Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmlah Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmlah Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sqrdmlah Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqrdmlah Hn, Hm, Ha
sqrdmlah Sn, Sm, Sa
sqrdmlah Vn.H8, Vm.H8, Va.H8
sqrdmlah Vn.H4, Vm.H4, Va.H4
sqrdmlah Vn.S4, Vm.S4, Va.S4
sqrdmlah Vn.S2, Vm.S2, Va.S2
sqrdmlsh Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmlsh Sn, Sm, Va.S[uimm] (#uimm < 4)
sqrdmlsh Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmlsh Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmlsh Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sqrdmlsh Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqrdmlsh Hn, Hm, Ha
sqrdmlsh Sn, Sm, Sa
sqrdmlsh Vn.H8, Vm.H8, Va.H8
sqrdmlsh Vn.H4, Vm.H4, Va.H4
sqrdmlsh Vn.S4, Vm.S4, Va.S4
sqrdmlsh Vn.S2, Vm.S2, Va.S2
sqrdmulh Hn, Hm, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmulh Sn, Sm, Va.S[uimm] (#uimm < 4)
sqrdmulh Vn.H8, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmulh Vn.H4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
sqrdmulh Vn.S4, Vm.S4, Va.S[uimm] (#uimm < 4)
sqrdmulh Vn.S2, Vm.S2, Va.S[uimm] (#uimm < 4)
sqrdmulh Hn, Hm, Ha
sqrdmulh Sn, Sm, Sa
sqrdmulh Vn.H8, Vm.H8, Va.H8
sqrdmulh Vn.H4, Vm.H4, Va.H4
sqrdmulh Vn.S4, Vm.S4, Va.S4
sqrdmulh Vn.S2, Vm.S2, Va.S2
sqrshl Bn, Bm, Ba
sqrshl Hn, Hm, Ha
sqrshl Sn, Sm, Sa
sqrshl Dn, Dm, Da
sqrshl Vn.B16, Vm.B16, Va.B16
sqrshl Vn.B8, Vm.B8, Va.B8
sqrshl Vn.H8, Vm.H8, Va.H8
sqrshl Vn.H4, Vm.H4, Va.H4
sqrshl Vn.S4, Vm.S4, Va.S4
sqrshl Vn.S2, Vm.S2, Va.S2
sqrshl Vn.D2, Vm.D2, Va.D2
sqrshrn Bn, Hm, #uimm (1 <= #uimm <= 8)
sqrshrn Hn, Sm, #uimm (1 <= #uimm <= 16)
sqrshrn Sn, Dm, #uimm (1 <= #uimm <= 32)
sqrshrn Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
sqrshrn Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
sqrshrn Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
sqrshrn2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
sqrshrn2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
sqrshrn2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
sqrshrun Bn, Hm, #uimm (1 <= #uimm <= 8)
sqrshrun Hn, Sm, #uimm (1 <= #uimm <= 16)
sqrshrun Sn, Dm, #uimm (1 <= #uimm <= 32)
sqrshrun Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
sqrshrun Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
sqrshrun Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
sqrshrun2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
sqrshrun2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
sqrshrun2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
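
Example (register choices are arbitrary): these narrow with rounding and saturation; the "un" variants saturate signed inputs to an unsigned result.

    sqrshrn  V0.H4, V1.S4, #15   // round, shift right by 15, saturate words to signed halfwords
    sqrshrun V2.B8, V3.H8, #8    // round, shift right by 8, saturate halfwords to unsigned bytes
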
sqshl Bn, Bm, #uimm (#uimm < 8)
sqshl Hn, Hm, #uimm (#uimm < 16)
sqshl Sn, Sm, #uimm (#uimm < 32)
sqshl Dn, Dm, #uimm (#uimm < 64)
sqshl Vn.B16, Vm.B16, #uimm (#uimm < 8)
sqshl Vn.B8, Vm.B8, #uimm (#uimm < 8)
sqshl Vn.H8, Vm.H8, #uimm (#uimm < 16)
sqshl Vn.H4, Vm.H4, #uimm (#uimm < 16)
sqshl Vn.S4, Vm.S4, #uimm (#uimm < 32)
sqshl Vn.S2, Vm.S2, #uimm (#uimm < 32)
sqshl Vn.D2, Vm.D2, #uimm (#uimm < 64)
sqshl Bn, Bm, Ba
sqshl Hn, Hm, Ha
sqshl Sn, Sm, Sa
sqshl Dn, Dm, Da
sqshl Vn.B16, Vm.B16, Va.B16
sqshl Vn.B8, Vm.B8, Va.B8
sqshl Vn.H8, Vm.H8, Va.H8
sqshl Vn.H4, Vm.H4, Va.H4
sqshl Vn.S4, Vm.S4, Va.S4
sqshl Vn.S2, Vm.S2, Va.S2
sqshl Vn.D2, Vm.D2, Va.D2
sqshlu Bn, Bm, #uimm (#uimm < 8)
sqshlu Hn, Hm, #uimm (#uimm < 16)
sqshlu Sn, Sm, #uimm (#uimm < 32)
sqshlu Dn, Dm, #uimm (#uimm < 64)
sqshlu Vn.B16, Vm.B16, #uimm (#uimm < 8)
sqshlu Vn.B8, Vm.B8, #uimm (#uimm < 8)
sqshlu Vn.H8, Vm.H8, #uimm (#uimm < 16)
sqshlu Vn.H4, Vm.H4, #uimm (#uimm < 16)
sqshlu Vn.S4, Vm.S4, #uimm (#uimm < 32)
sqshlu Vn.S2, Vm.S2, #uimm (#uimm < 32)
sqshlu Vn.D2, Vm.D2, #uimm (#uimm < 64)
sqshrn Bn, Hm, #uimm (1 <= #uimm <= 8)
sqshrn Hn, Sm, #uimm (1 <= #uimm <= 16)
sqshrn Sn, Dm, #uimm (1 <= #uimm <= 32)
sqshrn Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
sqshrn Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
sqshrn Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
sqshrn2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
sqshrn2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
sqshrn2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
sqshrun Bn, Hm, #uimm (1 <= #uimm <= 8)
sqshrun Hn, Sm, #uimm (1 <= #uimm <= 16)
sqshrun Sn, Dm, #uimm (1 <= #uimm <= 32)
sqshrun Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
sqshrun Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
sqshrun Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
sqshrun2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
sqshrun2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
sqshrun2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
sqsub Bn, Bm, Ba
sqsub Hn, Hm, Ha
sqsub Sn, Sm, Sa
sqsub Dn, Dm, Da
sqsub Vn.B16, Vm.B16, Va.B16
sqsub Vn.B8, Vm.B8, Va.B8
sqsub Vn.H8, Vm.H8, Va.H8
sqsub Vn.H4, Vm.H4, Va.H4
sqsub Vn.S4, Vm.S4, Va.S4
sqsub Vn.S2, Vm.S2, Va.S2
sqsub Vn.D2, Vm.D2, Va.D2
sqxtn Bn, Hm
sqxtn Hn, Sm
sqxtn Sn, Dm
sqxtn Vn.B8, Vm.H8
sqxtn Vn.H4, Vm.S4
sqxtn Vn.S2, Vm.D2
sqxtn2 Vn.B16, Vm.H8
sqxtn2 Vn.H8, Vm.S4
sqxtn2 Vn.S4, Vm.D2
sqxtun Bn, Hm
sqxtun Hn, Sm
sqxtun Sn, Dm
sqxtun Vn.B8, Vm.H8
sqxtun Vn.H4, Vm.S4
sqxtun Vn.S2, Vm.D2
sqxtun2 Vn.B16, Vm.H8
sqxtun2 Vn.H8, Vm.S4
sqxtun2 Vn.S4, Vm.D2
srhadd Vn.B16, Vm.B16, Va.B16
srhadd Vn.B8, Vm.B8, Va.B8
srhadd Vn.H8, Vm.H8, Va.H8
srhadd Vn.H4, Vm.H4, Va.H4
srhadd Vn.S4, Vm.S4, Va.S4
srhadd Vn.S2, Vm.S2, Va.S2
sri Dn, Dm, #uimm (1 <= #uimm <= 64)
sri Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
sri Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
sri Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
sri Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
sri Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
sri Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
sri Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
srshl Dn, Dm, Da
srshl Vn.B16, Vm.B16, Va.B16
srshl Vn.B8, Vm.B8, Va.B8
srshl Vn.H8, Vm.H8, Va.H8
srshl Vn.H4, Vm.H4, Va.H4
srshl Vn.S4, Vm.S4, Va.S4
srshl Vn.S2, Vm.S2, Va.S2
srshl Vn.D2, Vm.D2, Va.D2
srshr Dn, Dm, #uimm (1 <= #uimm <= 64)
srshr Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
srshr Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
srshr Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
srshr Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
srshr Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
srshr Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
srshr Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
srsra Dn, Dm, #uimm (1 <= #uimm <= 64)
srsra Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
srsra Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
srsra Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
srsra Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
srsra Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
srsra Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
srsra Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
sshl Dn, Dm, Da
sshl Vn.B16, Vm.B16, Va.B16
sshl Vn.B8, Vm.B8, Va.B8
sshl Vn.H8, Vm.H8, Va.H8
sshl Vn.H4, Vm.H4, Va.H4
sshl Vn.S4, Vm.S4, Va.S4
sshl Vn.S2, Vm.S2, Va.S2
sshl Vn.D2, Vm.D2, Va.D2
sshll Vn.H8, Vm.B8, #uimm (#uimm < 8)
sshll Vn.S4, Vm.H4, #uimm (#uimm < 16)
sshll Vn.D2, Vm.S2, #uimm (#uimm < 32)
sshll2 Vn.H8, Vm.B16, #uimm (#uimm < 8)
sshll2 Vn.S4, Vm.H8, #uimm (#uimm < 16)
sshll2 Vn.D2, Vm.S4, #uimm (#uimm < 32)
sshr Dn, Dm, #uimm (1 <= #uimm <= 64)
sshr Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
sshr Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
sshr Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
sshr Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
sshr Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
sshr Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
sshr Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
ssra Dn, Dm, #uimm (1 <= #uimm <= 64)
ssra Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
ssra Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
ssra Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
ssra Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
ssra Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
ssra Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
ssra Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
ssubl Vn.H8, Vm.B8, Va.B8
ssubl Vn.S4, Vm.H4, Va.H4
ssubl Vn.D2, Vm.S2, Va.S2
ssubl2 Vn.H8, Vm.B16, Va.B16
ssubl2 Vn.S4, Vm.H8, Va.H8
ssubl2 Vn.D2, Vm.S4, Va.S4
ssubw Vn.H8, Vm.H8, Va.B8
ssubw Vn.S4, Vm.S4, Va.H4
ssubw Vn.D2, Vm.D2, Va.S2
ssubw2 Vn.H8, Vm.H8, Va.B16
ssubw2 Vn.S4, Vm.S4, Va.H8
ssubw2 Vn.D2, Vm.D2, Va.S4
st1 {Vn.B16 * 1}, [Xm|SP]
st1 {Vn.B8 * 1}, [Xm|SP]
st1 {Vn.H8 * 1}, [Xm|SP]
st1 {Vn.H4 * 1}, [Xm|SP]
st1 {Vn.S4 * 1}, [Xm|SP]
st1 {Vn.S2 * 1}, [Xm|SP]
st1 {Vn.D2 * 1}, [Xm|SP]
st1 {Vn.D1 * 1}, [Xm|SP]
st1 {Vn.B16 * 2}, [Xm|SP]
st1 {Vn.B8 * 2}, [Xm|SP]
st1 {Vn.H8 * 2}, [Xm|SP]
st1 {Vn.H4 * 2}, [Xm|SP]
st1 {Vn.S4 * 2}, [Xm|SP]
st1 {Vn.S2 * 2}, [Xm|SP]
st1 {Vn.D2 * 2}, [Xm|SP]
st1 {Vn.D1 * 2}, [Xm|SP]
st1 {Vn.B16 * 3}, [Xm|SP]
st1 {Vn.B8 * 3}, [Xm|SP]
st1 {Vn.H8 * 3}, [Xm|SP]
st1 {Vn.H4 * 3}, [Xm|SP]
st1 {Vn.S4 * 3}, [Xm|SP]
st1 {Vn.S2 * 3}, [Xm|SP]
st1 {Vn.D2 * 3}, [Xm|SP]
st1 {Vn.D1 * 3}, [Xm|SP]
st1 {Vn.B16 * 4}, [Xm|SP]
st1 {Vn.B8 * 4}, [Xm|SP]
st1 {Vn.H8 * 4}, [Xm|SP]
st1 {Vn.H4 * 4}, [Xm|SP]
st1 {Vn.S4 * 4}, [Xm|SP]
st1 {Vn.S2 * 4}, [Xm|SP]
st1 {Vn.D2 * 4}, [Xm|SP]
st1 {Vn.D1 * 4}, [Xm|SP]
st1 {Vn.B8 * 1}, [Xm|SP], 8
st1 {Vn.H4 * 1}, [Xm|SP], 8
st1 {Vn.S2 * 1}, [Xm|SP], 8
st1 {Vn.D1 * 1}, [Xm|SP], 8
st1 {Vn.B16 * 1}, [Xm|SP], 16
st1 {Vn.H8 * 1}, [Xm|SP], 16
st1 {Vn.S4 * 1}, [Xm|SP], 16
st1 {Vn.D2 * 1}, [Xm|SP], 16
st1 {Vn.B16 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H8 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H4 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S4 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S2 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D2 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D1 * 1}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 2}, [Xm|SP], 16
st1 {Vn.H4 * 2}, [Xm|SP], 16
st1 {Vn.S2 * 2}, [Xm|SP], 16
st1 {Vn.D1 * 2}, [Xm|SP], 16
st1 {Vn.B16 * 2}, [Xm|SP], 32
st1 {Vn.H8 * 2}, [Xm|SP], 32
st1 {Vn.S4 * 2}, [Xm|SP], 32
st1 {Vn.D2 * 2}, [Xm|SP], 32
st1 {Vn.B16 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H8 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H4 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S4 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S2 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D2 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D1 * 2}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 3}, [Xm|SP], 24
st1 {Vn.H4 * 3}, [Xm|SP], 24
st1 {Vn.S2 * 3}, [Xm|SP], 24
st1 {Vn.D1 * 3}, [Xm|SP], 24
st1 {Vn.B16 * 3}, [Xm|SP], 48
st1 {Vn.H8 * 3}, [Xm|SP], 48
st1 {Vn.S4 * 3}, [Xm|SP], 48
st1 {Vn.D2 * 3}, [Xm|SP], 48
st1 {Vn.B16 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H8 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H4 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S4 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S2 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D2 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D1 * 3}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 4}, [Xm|SP], 32
st1 {Vn.H4 * 4}, [Xm|SP], 32
st1 {Vn.S2 * 4}, [Xm|SP], 32
st1 {Vn.D1 * 4}, [Xm|SP], 32
st1 {Vn.B16 * 4}, [Xm|SP], 64
st1 {Vn.H8 * 4}, [Xm|SP], 64
st1 {Vn.S4 * 4}, [Xm|SP], 64
st1 {Vn.D2 * 4}, [Xm|SP], 64
st1 {Vn.B16 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B8 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H8 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.H4 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S4 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.S2 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D2 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.D1 * 4}, [Xm|SP], Xa (a is 0-30)
st1 {Vn.B * 1}[uimm], [Xm|SP] (#uimm < 16)
st1 {Vn.H * 1}[uimm], [Xm|SP] (#uimm < 8)
st1 {Vn.S * 1}[uimm], [Xm|SP] (#uimm < 4)
st1 {Vn.D * 1}[uimm], [Xm|SP] (#uimm < 2)
st1 {Vn.B * 1}[uimm], [Xm|SP], 1 (#uimm < 16)
st1 {Vn.B * 1}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
st1 {Vn.H * 1}[uimm], [Xm|SP], 2 (#uimm < 8)
st1 {Vn.H * 1}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
st1 {Vn.S * 1}[uimm], [Xm|SP], 4 (#uimm < 4)
st1 {Vn.S * 1}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
st1 {Vn.D * 1}[uimm], [Xm|SP], 8 (#uimm < 2)
st1 {Vn.D * 1}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
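
Example (kept in this reference's {… * n} list notation; register numbers are arbitrary): st1 covers whole-register stores, post-indexed stores, and single-lane stores.

    st1 {V0.B16 * 1}, [X0]        // store one full 16-byte register
    st1 {V0.S4 * 1}, [X0], 16     // store, then post-increment X0 by 16
    st1 {V0.B * 1}[3], [X1], 1    // store byte lane 3 only, then X1 += 1
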
st2 {Vn.B16 * 2}, [Xm|SP]
st2 {Vn.B8 * 2}, [Xm|SP]
st2 {Vn.H8 * 2}, [Xm|SP]
st2 {Vn.H4 * 2}, [Xm|SP]
st2 {Vn.S4 * 2}, [Xm|SP]
st2 {Vn.S2 * 2}, [Xm|SP]
st2 {Vn.D2 * 2}, [Xm|SP]
st2 {Vn.B8 * 2}, [Xm|SP], 16
st2 {Vn.H4 * 2}, [Xm|SP], 16
st2 {Vn.S2 * 2}, [Xm|SP], 16
st2 {Vn.B16 * 2}, [Xm|SP], 32
st2 {Vn.H8 * 2}, [Xm|SP], 32
st2 {Vn.S4 * 2}, [Xm|SP], 32
st2 {Vn.D2 * 2}, [Xm|SP], 32
st2 {Vn.B16 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.B8 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.H8 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.H4 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.S4 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.S2 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.D2 * 2}, [Xm|SP], Xa (a is 0-30)
st2 {Vn.B * 2}[uimm], [Xm|SP] (#uimm < 16)
st2 {Vn.H * 2}[uimm], [Xm|SP] (#uimm < 8)
st2 {Vn.S * 2}[uimm], [Xm|SP] (#uimm < 4)
st2 {Vn.D * 2}[uimm], [Xm|SP] (#uimm < 2)
st2 {Vn.B * 2}[uimm], [Xm|SP], 2 (#uimm < 16)
st2 {Vn.B * 2}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
st2 {Vn.H * 2}[uimm], [Xm|SP], 4 (#uimm < 8)
st2 {Vn.H * 2}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
st2 {Vn.S * 2}[uimm], [Xm|SP], 8 (#uimm < 4)
st2 {Vn.S * 2}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
st2 {Vn.D * 2}[uimm], [Xm|SP], 16 (#uimm < 2)
st2 {Vn.D * 2}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
st3 {Vn.B16 * 3}, [Xm|SP]
st3 {Vn.B8 * 3}, [Xm|SP]
st3 {Vn.H8 * 3}, [Xm|SP]
st3 {Vn.H4 * 3}, [Xm|SP]
st3 {Vn.S4 * 3}, [Xm|SP]
st3 {Vn.S2 * 3}, [Xm|SP]
st3 {Vn.D2 * 3}, [Xm|SP]
st3 {Vn.B8 * 3}, [Xm|SP], 24
st3 {Vn.H4 * 3}, [Xm|SP], 24
st3 {Vn.S2 * 3}, [Xm|SP], 24
st3 {Vn.B16 * 3}, [Xm|SP], 48
st3 {Vn.H8 * 3}, [Xm|SP], 48
st3 {Vn.S4 * 3}, [Xm|SP], 48
st3 {Vn.D2 * 3}, [Xm|SP], 48
st3 {Vn.B16 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.B8 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.H8 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.H4 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.S4 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.S2 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.D2 * 3}, [Xm|SP], Xa (a is 0-30)
st3 {Vn.B * 3}[uimm], [Xm|SP] (#uimm < 16)
st3 {Vn.H * 3}[uimm], [Xm|SP] (#uimm < 8)
st3 {Vn.S * 3}[uimm], [Xm|SP] (#uimm < 4)
st3 {Vn.D * 3}[uimm], [Xm|SP] (#uimm < 2)
st3 {Vn.B * 3}[uimm], [Xm|SP], 3 (#uimm < 16)
st3 {Vn.B * 3}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
st3 {Vn.H * 3}[uimm], [Xm|SP], 6 (#uimm < 8)
st3 {Vn.H * 3}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
st3 {Vn.S * 3}[uimm], [Xm|SP], 12 (#uimm < 4)
st3 {Vn.S * 3}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
st3 {Vn.D * 3}[uimm], [Xm|SP], 24 (#uimm < 2)
st3 {Vn.D * 3}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
st4 {Vn.B16 * 4}, [Xm|SP]
st4 {Vn.B8 * 4}, [Xm|SP]
st4 {Vn.H8 * 4}, [Xm|SP]
st4 {Vn.H4 * 4}, [Xm|SP]
st4 {Vn.S4 * 4}, [Xm|SP]
st4 {Vn.S2 * 4}, [Xm|SP]
st4 {Vn.D2 * 4}, [Xm|SP]
st4 {Vn.B8 * 4}, [Xm|SP], 32
st4 {Vn.H4 * 4}, [Xm|SP], 32
st4 {Vn.S2 * 4}, [Xm|SP], 32
st4 {Vn.B16 * 4}, [Xm|SP], 64
st4 {Vn.H8 * 4}, [Xm|SP], 64
st4 {Vn.S4 * 4}, [Xm|SP], 64
st4 {Vn.D2 * 4}, [Xm|SP], 64
st4 {Vn.B16 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.B8 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.H8 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.H4 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.S4 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.S2 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.D2 * 4}, [Xm|SP], Xa (a is 0-30)
st4 {Vn.B * 4}[uimm], [Xm|SP] (#uimm < 16)
st4 {Vn.H * 4}[uimm], [Xm|SP] (#uimm < 8)
st4 {Vn.S * 4}[uimm], [Xm|SP] (#uimm < 4)
st4 {Vn.D * 4}[uimm], [Xm|SP] (#uimm < 2)
st4 {Vn.B * 4}[uimm], [Xm|SP], 4 (#uimm < 16)
st4 {Vn.B * 4}[uimm], [Xm|SP], Xa (#uimm < 16, a is 0-30)
st4 {Vn.H * 4}[uimm], [Xm|SP], 8 (#uimm < 8)
st4 {Vn.H * 4}[uimm], [Xm|SP], Xa (#uimm < 8, a is 0-30)
st4 {Vn.S * 4}[uimm], [Xm|SP], 16 (#uimm < 4)
st4 {Vn.S * 4}[uimm], [Xm|SP], Xa (#uimm < 4, a is 0-30)
st4 {Vn.D * 4}[uimm], [Xm|SP], 32 (#uimm < 2)
st4 {Vn.D * 4}[uimm], [Xm|SP], Xa (#uimm < 2, a is 0-30)
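
Example (kept in the reference's list notation; register numbers are arbitrary): st4 interleaves four registers on the way out, e.g. turning four planar channels into packed elements.

    st4 {V0.S4 * 4}, [X0], 64   // interleave the S lanes of V0-V3 into memory, X0 += 64
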
stadd Wn, [Xm|SP]
stadd Xn, [Xm|SP]
staddl Wn, [Xm|SP]
staddl Xn, [Xm|SP]
stclr Wn, [Xm|SP]
stclr Xn, [Xm|SP]
stclrl Wn, [Xm|SP]
stclrl Xn, [Xm|SP]
steor Wn, [Xm|SP]
steor Xn, [Xm|SP]
steorl Wn, [Xm|SP]
steorl Xn, [Xm|SP]
stllr Wn, [Xm|SP]
stllr Xn, [Xm|SP]
stlr Wn, [Xm|SP]
stlr Xn, [Xm|SP]
stlur Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stlur Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stlurb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stlurh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stlxp Wn, Wm, Wa, [Xb|SP]
stlxp Wn, Xm, Xa, [Xb|SP]
stlxr Wn, Wm, [Xa|SP]
stlxr Wn, Xm, [Xa|SP]
stnp Sn, Sm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
stnp Dn, Dm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
stnp Qn, Qm, [Xa|SP {, #simm } ] (-1024 <= #simm < 1024, #simm = 16 * N)
stnp Wn, Wm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
stnp Xn, Xm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
stp Sn, Sm, [Xa|SP], #simm (-256 <= #simm < 256, #simm = 4 * N)
stp Dn, Dm, [Xa|SP], #simm (-512 <= #simm < 512, #simm = 8 * N)
stp Qn, Qm, [Xa|SP], #simm (-1024 <= #simm < 1024, #simm = 16 * N)
stp Sn, Sm, [Xa|SP, #simm]! (-256 <= #simm < 256, #simm = 4 * N)
stp Dn, Dm, [Xa|SP, #simm]! (-512 <= #simm < 512, #simm = 8 * N)
stp Qn, Qm, [Xa|SP, #simm]! (-1024 <= #simm < 1024, #simm = 16 * N)
stp Sn, Sm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
stp Dn, Dm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
stp Qn, Qm, [Xa|SP {, #simm } ] (-1024 <= #simm < 1024, #simm = 16 * N)
stp Wn, Wm, [Xa|SP], #simm (-256 <= #simm < 256, #simm = 4 * N)
stp Xn, Xm, [Xa|SP], #simm (-512 <= #simm < 512, #simm = 8 * N)
stp Wn, Wm, [Xa|SP, #simm]! (-256 <= #simm < 256, #simm = 4 * N)
stp Xn, Xm, [Xa|SP, #simm]! (-512 <= #simm < 512, #simm = 8 * N)
stp Wn, Wm, [Xa|SP {, #simm } ] (-256 <= #simm < 256, #simm = 4 * N)
stp Xn, Xm, [Xa|SP {, #simm } ] (-512 <= #simm < 512, #simm = 8 * N)
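
Example (register choices follow the usual AAPCS64 frame layout): the pre-index writeback forms make stp convenient for pushing register pairs; the matching ldp forms appear earlier in this reference.

    stp X29, X30, [SP, #-16]!   // push frame pointer and link register
    stp X19, X20, [SP, #-16]!   // push a callee-saved pair
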
str Bn, [Xm|SP], #simm (-256 <= #simm < 256)
str Hn, [Xm|SP], #simm (-256 <= #simm < 256)
str Sn, [Xm|SP], #simm (-256 <= #simm < 256)
str Dn, [Xm|SP], #simm (-256 <= #simm < 256)
str Qn, [Xm|SP], #simm (-256 <= #simm < 256)
str Bn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Hn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Sn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Dn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Qn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Bn, [Xm|SP {, #uimm } ] (#uimm < 4096)
str Hn, [Xm|SP {, #uimm } ] (#uimm < 8192, #uimm = 2 * N)
str Sn, [Xm|SP {, #uimm } ] (#uimm < 16384, #uimm = 4 * N)
str Dn, [Xm|SP {, #uimm } ] (#uimm < 32768, #uimm = 8 * N)
str Qn, [Xm|SP {, #uimm } ] (#uimm < 65536, #uimm = 16 * N)
str Wn, [Xm|SP], #simm (-256 <= #simm < 256)
str Xn, [Xm|SP], #simm (-256 <= #simm < 256)
str Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Xn, [Xm|SP, #simm]! (-256 <= #simm < 256)
str Wn, [Xm|SP {, #uimm } ] (#uimm < 16384, #uimm = 4 * N)
str Xn, [Xm|SP {, #uimm } ] (#uimm < 32768, #uimm = 8 * N)
str Bn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 0])
str Hn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 1])
str Sn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 2])
str Dn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 3])
str Qn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 4])
str Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 2])
str Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 3])
strb Wn, [Xm|SP], #simm (-256 <= #simm < 256)
strb Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
strb Wn, [Xm|SP {, #uimm } ] (#uimm < 4096)
strb Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 0])
strh Wn, [Xm|SP], #simm (-256 <= #simm < 256)
strh Wn, [Xm|SP, #simm]! (-256 <= #simm < 256)
strh Wn, [Xm|SP {, #uimm } ] (#uimm < 8192, #uimm = 2 * N)
strh Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ] (#uimm = [0, 1])
stset Wn, [Xm|SP]
stset Xn, [Xm|SP]
stsetl Wn, [Xm|SP]
stsetl Xn, [Xm|SP]
stsmax Wn, [Xm|SP]
stsmax Xn, [Xm|SP]
stsmaxl Wn, [Xm|SP]
stsmaxl Xn, [Xm|SP]
stsmin Wn, [Xm|SP]
stsmin Xn, [Xm|SP]
stsminl Wn, [Xm|SP]
stsminl Xn, [Xm|SP]
sttr Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
sttr Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
sttrb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
sttrh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stumax Wn, [Xm|SP]
stumax Xn, [Xm|SP]
stumaxl Wn, [Xm|SP]
stumaxl Xn, [Xm|SP]
stumin Wn, [Xm|SP]
stumin Xn, [Xm|SP]
stuminl Wn, [Xm|SP]
stuminl Xn, [Xm|SP]
stur Bn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stur Hn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stur Sn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stur Dn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stur Qn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stur Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stur Xn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
sturb Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
sturh Wn, [Xm|SP {, #simm } ] (-256 <= #simm < 256)
stxp Wn, Wm, Wa, [Xb|SP]
stxp Wn, Xm, Xa, [Xb|SP]
stxr Wn, Wm, [Xa|SP]
stxr Wn, Xm, [Xa|SP]
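
Example (a minimal sketch; the label stands in for a real branch target): stxr/stlxr write a success flag into their first operand, which drives the usual retry loop around a load-exclusive (ldaxr and cbnz are listed earlier in this reference).

    retry:
        ldaxr W0, [X1]        // load-exclusive from [X1]
        add   W0, W0, #1
        stlxr W2, W0, [X1]    // W2 = 0 on success, 1 if exclusivity was lost
        cbnz  W2, retry       // retry until the store-exclusive succeeds
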
sub Wn, Wm, Wa {, LSL|LSR|ASR #uimm } (#uimm < 32)
sub Xn, Xm, Xa {, LSL|LSR|ASR #uimm } (#uimm < 64)
sub Wn|WSP, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm } (0 <= #uimm <= 4)
sub Xn|SP, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } } (0 <= #uimm <= 4)
sub Xn|SP, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm } (0 <= #uimm <= 4)
sub Wn|WSP, Wm|WSP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
sub Xn|SP, Xm|SP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
sub Dn, Dm, Da
sub Vn.B16, Vm.B16, Va.B16
sub Vn.B8, Vm.B8, Va.B8
sub Vn.H8, Vm.H8, Va.H8
sub Vn.H4, Vm.H4, Va.H4
sub Vn.S4, Vm.S4, Va.S4
sub Vn.S2, Vm.S2, Va.S2
sub Vn.D2, Vm.D2, Va.D2
subhn Vn.B8, Vm.H8, Va.H8
subhn Vn.H4, Vm.S4, Va.S4
subhn Vn.S2, Vm.D2, Va.D2
subhn2 Vn.B16, Vm.H8, Va.H8
subhn2 Vn.H8, Vm.S4, Va.S4
subhn2 Vn.S4, Vm.D2, Va.D2
subs Wn, Wm, Wa {, LSL|LSR|ASR #uimm } (#uimm < 32)
subs Xn, Xm, Xa {, LSL|LSR|ASR #uimm } (#uimm < 64)
subs Wn, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm } (0 <= #uimm <= 4)
subs Xn, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } } (0 <= #uimm <= 4)
subs Xn, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm } (0 <= #uimm <= 4)
subs Wn, Wm|WSP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
subs Xn, Xm|SP, #uimm {, LSL #uimm1 } (#uimm < 4096, #uimm1 = [0, 12])
suqadd Bn, Bm
suqadd Hn, Hm
suqadd Sn, Sm
suqadd Dn, Dm
suqadd Vn.B16, Vm.B16
suqadd Vn.B8, Vm.B8
suqadd Vn.H8, Vm.H8
suqadd Vn.H4, Vm.H4
suqadd Vn.S4, Vm.S4
suqadd Vn.S2, Vm.S2
suqadd Vn.D2, Vm.D2
svc #uimm (#uimm < 65536)
swp Wn, Wm, [Xa|SP]
swp Xn, Xm, [Xa|SP]
swpa Wn, Wm, [Xa|SP]
swpa Xn, Xm, [Xa|SP]
swpal Wn, Wm, [Xa|SP]
swpal Xn, Xm, [Xa|SP]
swpl Wn, Wm, [Xa|SP]
swpl Xn, Xm, [Xa|SP]
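
Example (register choices are arbitrary): swp atomically exchanges a register with memory; the a/l suffixes add acquire/release ordering.

    swpal W0, W1, [X2]   // atomically store W0 to [X2], old value -> W1
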
sxtl Vn.H8, Vm.B8
sxtl Vn.S4, Vm.H4
sxtl Vn.D2, Vm.S2
sxtl2 Vn.H8, Vm.B16
sxtl2 Vn.S4, Vm.H8
sxtl2 Vn.D2, Vm.S4
sys #uimm, control_reg, control_reg, #uimm1 {, Xn } (#uimm < 8, #uimm1 < 8)
sysl Xn, #uimm, control_reg, control_reg, #uimm1 (#uimm < 8, #uimm1 < 8)
tbl Vn.B16, {Vm.B16 * 2}, Va.B16
tbl Vn.B8, {Vm.B16 * 2}, Va.B8
tbl Vn.B16, {Vm.B16 * 3}, Va.B16
tbl Vn.B8, {Vm.B16 * 3}, Va.B8
tbl Vn.B16, {Vm.B16 * 4}, Va.B16
tbl Vn.B8, {Vm.B16 * 4}, Va.B8
tbl Vn.B16, {Vm.B16 * 1}, Va.B16
tbl Vn.B8, {Vm.B16 * 1}, Va.B8
tbnz Wn, #uimm, <offset> (#uimm < 32, offset is 14 bit, 4-byte aligned)
tbnz Xn, #uimm, <offset> (#uimm < 64, offset is 14 bit, 4-byte aligned)
tbx Vn.B16, {Vm.B16 * 2}, Va.B16
tbx Vn.B8, {Vm.B16 * 2}, Va.B8
tbx Vn.B16, {Vm.B16 * 3}, Va.B16
tbx Vn.B8, {Vm.B16 * 3}, Va.B8
tbx Vn.B16, {Vm.B16 * 4}, Va.B16
tbx Vn.B8, {Vm.B16 * 4}, Va.B8
tbx Vn.B16, {Vm.B16 * 1}, Va.B16
tbx Vn.B8, {Vm.B16 * 1}, Va.B8
tbz Wn, #uimm, <offset> (#uimm < 32, offset is 14 bit, 4-byte aligned)
tbz Xn, #uimm, <offset> (#uimm < 64, offset is 14 bit, 4-byte aligned)
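
Example (labels stand in for the <offset> operands): tbz/tbnz branch on a single bit without touching the flags.

    tbz  W0, #0, is_even       // branch if bit 0 of W0 is clear
    tbnz X1, #63, is_negative  // branch if the sign bit of X1 is set
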
trn1 Vn.B16, Vm.B16, Va.B16
trn1 Vn.B8, Vm.B8, Va.B8
trn1 Vn.H8, Vm.H8, Va.H8
trn1 Vn.H4, Vm.H4, Va.H4
trn1 Vn.S4, Vm.S4, Va.S4
trn1 Vn.S2, Vm.S2, Va.S2
trn1 Vn.D2, Vm.D2, Va.D2
trn2 Vn.B16, Vm.B16, Va.B16
trn2 Vn.B8, Vm.B8, Va.B8
trn2 Vn.H8, Vm.H8, Va.H8
trn2 Vn.H4, Vm.H4, Va.H4
trn2 Vn.S4, Vm.S4, Va.S4
trn2 Vn.S2, Vm.S2, Va.S2
trn2 Vn.D2, Vm.D2, Va.D2
tst Wn, #imm (#imm is a logical immediate)
tst Xn, #imm (#imm is a logical immediate)
tst Wn, Wm {, LSL|LSR|ASR|ROR #uimm } (#uimm < 32)
tst Xn, Xm {, LSL|LSR|ASR|ROR #uimm } (#uimm < 64)
uaba Vn.B16, Vm.B16, Va.B16
uaba Vn.B8, Vm.B8, Va.B8
uaba Vn.H8, Vm.H8, Va.H8
uaba Vn.H4, Vm.H4, Va.H4
uaba Vn.S4, Vm.S4, Va.S4
uaba Vn.S2, Vm.S2, Va.S2
uabal Vn.H8, Vm.B8, Va.B8
uabal Vn.S4, Vm.H4, Va.H4
uabal Vn.D2, Vm.S2, Va.S2
uabal2 Vn.H8, Vm.B16, Va.B16
uabal2 Vn.S4, Vm.H8, Va.H8
uabal2 Vn.D2, Vm.S4, Va.S4
uabd Vn.B16, Vm.B16, Va.B16
uabd Vn.B8, Vm.B8, Va.B8
uabd Vn.H8, Vm.H8, Va.H8
uabd Vn.H4, Vm.H4, Va.H4
uabd Vn.S4, Vm.S4, Va.S4
uabd Vn.S2, Vm.S2, Va.S2
uabdl Vn.H8, Vm.B8, Va.B8
uabdl Vn.S4, Vm.H4, Va.H4
uabdl Vn.D2, Vm.S2, Va.S2
uabdl2 Vn.H8, Vm.B16, Va.B16
uabdl2 Vn.S4, Vm.H8, Va.H8
uabdl2 Vn.D2, Vm.S4, Va.S4
uadalp Vn.H8, Vm.B16
uadalp Vn.H4, Vm.B8
uadalp Vn.S4, Vm.H8
uadalp Vn.S2, Vm.H4
uadalp Vn.D2, Vm.S4
uadalp Vn.D1, Vm.S2
uaddl Vn.H8, Vm.B8, Va.B8
uaddl Vn.S4, Vm.H4, Va.H4
uaddl Vn.D2, Vm.S2, Va.S2
uaddl2 Vn.H8, Vm.B16, Va.B16
uaddl2 Vn.S4, Vm.H8, Va.H8
uaddl2 Vn.D2, Vm.S4, Va.S4
uaddlp Vn.H8, Vm.B16
uaddlp Vn.H4, Vm.B8
uaddlp Vn.S4, Vm.H8
uaddlp Vn.S2, Vm.H4
uaddlp Vn.D2, Vm.S4
uaddlp Vn.D1, Vm.S2
uaddlv Hn, Vm.B16
uaddlv Hn, Vm.B8
uaddlv Sn, Vm.H8
uaddlv Sn, Vm.H4
uaddlv Dn, Vm.S4
uaddw Vn.H8, Vm.H8, Va.B8
uaddw Vn.S4, Vm.S4, Va.H4
uaddw Vn.D2, Vm.D2, Va.S2
uaddw2 Vn.H8, Vm.H8, Va.B16
uaddw2 Vn.S4, Vm.S4, Va.H8
uaddw2 Vn.D2, Vm.D2, Va.S4
ubfiz Wn, Wm, #uimm, #uimm1 (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
ubfiz Xn, Xm, #uimm, #uimm1 (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)
ubfm Wn, Wm, #uimm, #uimm1 (#uimm < 32, #uimm1 < 32)
ubfm Xn, Xm, #uimm, #uimm1 (#uimm < 64, #uimm1 < 64)
ubfx Wn, Wm, #uimm, #uimm1 (#uimm < 32, 1 <= #uimm1 <= 32 - uimm)
ubfx Xn, Xm, #uimm, #uimm1 (#uimm < 64, 1 <= #uimm1 <= 64 - uimm)
ucvtf Hn, Hm, #uimm (1 <= #uimm <= 16)
ucvtf Sn, Sm, #uimm (1 <= #uimm <= 32)
ucvtf Dn, Dm, #uimm (1 <= #uimm <= 64)
ucvtf Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
ucvtf Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
ucvtf Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
ucvtf Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
ucvtf Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
ucvtf Hn, Hm
ucvtf Sn, Sm
ucvtf Dn, Dm
ucvtf Vn.H8, Vm.H8
ucvtf Vn.H4, Vm.H4
ucvtf Vn.S4, Vm.S4
ucvtf Vn.S2, Vm.S2
ucvtf Vn.D2, Vm.D2
ucvtf Hn, Wm, #uimm (1 <= #uimm <= 32)
ucvtf Sn, Wm, #uimm (1 <= #uimm <= 32)
ucvtf Dn, Wm, #uimm (1 <= #uimm <= 32)
ucvtf Hn, Xm, #uimm (1 <= #uimm <= 64)
ucvtf Sn, Xm, #uimm (1 <= #uimm <= 64)
ucvtf Dn, Xm, #uimm (1 <= #uimm <= 64)
ucvtf Hn, Wm
ucvtf Sn, Wm
ucvtf Dn, Wm
ucvtf Hn, Xm
ucvtf Sn, Xm
ucvtf Dn, Xm
udf #uimm (#uimm < 65536)
udiv Wn, Wm, Wa
udiv Xn, Xm, Xa
udot Vn.S2, Vm.B8, Va.B4[uimm] (#uimm < 4)
udot Vn.S4, Vm.B16, Va.B4[uimm] (#uimm < 4)
udot Vn.S2, Vm.B8, Va.B8
udot Vn.S4, Vm.B16, Va.B16
uhadd Vn.B16, Vm.B16, Va.B16
uhadd Vn.B8, Vm.B8, Va.B8
uhadd Vn.H8, Vm.H8, Va.H8
uhadd Vn.H4, Vm.H4, Va.H4
uhadd Vn.S4, Vm.S4, Va.S4
uhadd Vn.S2, Vm.S2, Va.S2
uhsub Vn.B16, Vm.B16, Va.B16
uhsub Vn.B8, Vm.B8, Va.B8
uhsub Vn.H8, Vm.H8, Va.H8
uhsub Vn.H4, Vm.H4, Va.H4
uhsub Vn.S4, Vm.S4, Va.S4
uhsub Vn.S2, Vm.S2, Va.S2
umax Vn.B16, Vm.B16, Va.B16
umax Vn.B8, Vm.B8, Va.B8
umax Vn.H8, Vm.H8, Va.H8
umax Vn.H4, Vm.H4, Va.H4
umax Vn.S4, Vm.S4, Va.S4
umax Vn.S2, Vm.S2, Va.S2
umaxp Vn.B16, Vm.B16, Va.B16
umaxp Vn.B8, Vm.B8, Va.B8
umaxp Vn.H8, Vm.H8, Va.H8
umaxp Vn.H4, Vm.H4, Va.H4
umaxp Vn.S4, Vm.S4, Va.S4
umaxp Vn.S2, Vm.S2, Va.S2
umaxv Bn, Vm.B16
umaxv Bn, Vm.B8
umaxv Hn, Vm.H8
umaxv Hn, Vm.H4
umaxv Sn, Vm.S4
umin Vn.B16, Vm.B16, Va.B16
umin Vn.B8, Vm.B8, Va.B8
umin Vn.H8, Vm.H8, Va.H8
umin Vn.H4, Vm.H4, Va.H4
umin Vn.S4, Vm.S4, Va.S4
umin Vn.S2, Vm.S2, Va.S2
uminp Vn.B16, Vm.B16, Va.B16
uminp Vn.B8, Vm.B8, Va.B8
uminp Vn.H8, Vm.H8, Va.H8
uminp Vn.H4, Vm.H4, Va.H4
uminp Vn.S4, Vm.S4, Va.S4
uminp Vn.S2, Vm.S2, Va.S2
uminv Bn, Vm.B16
uminv Bn, Vm.B8
uminv Hn, Vm.H8
uminv Hn, Vm.H4
uminv Sn, Vm.S4
umlal Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
umlal Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
umlal Vn.H8, Vm.B8, Va.B8
umlal Vn.S4, Vm.H4, Va.H4
umlal Vn.D2, Vm.S2, Va.S2
umlal2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
umlal2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
umlal2 Vn.H8, Vm.B16, Va.B16
umlal2 Vn.S4, Vm.H8, Va.H8
umlal2 Vn.D2, Vm.S4, Va.S4
umlsl Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
umlsl Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
umlsl Vn.H8, Vm.B8, Va.B8
umlsl Vn.S4, Vm.H4, Va.H4
umlsl Vn.D2, Vm.S2, Va.S2
umlsl2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
umlsl2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
umlsl2 Vn.H8, Vm.B16, Va.B16
umlsl2 Vn.S4, Vm.H8, Va.H8
umlsl2 Vn.D2, Vm.S4, Va.S4
umov Wn, Vm.B[uimm] (#uimm < 16)
umov Wn, Vm.H[uimm] (#uimm < 8)
umov Wn, Vm.S[uimm] (#uimm < 4)
umov Xn, Vm.D[uimm] (#uimm < 2)
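
Example (register choices are arbitrary): umov zero-extends an element into a general register; compare smov above for the sign-extending variant.

    umov W0, V1.H[2]   // halfword lane 2 of V1, zero-extended into W0
    umov X0, V1.D[1]   // high doubleword of V1 into X0
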
umull Vn.S4, Vm.H4, Va.H[uimm] (a is 0-15, #uimm < 8)
umull Vn.D2, Vm.S2, Va.S[uimm] (#uimm < 4)
umull Vn.H8, Vm.B8, Va.B8
umull Vn.S4, Vm.H4, Va.H4
umull Vn.D2, Vm.S2, Va.S2
umull Xn, Wm, Wa
umull2 Vn.S4, Vm.H8, Va.H[uimm] (a is 0-15, #uimm < 8)
umull2 Vn.D2, Vm.S4, Va.S[uimm] (#uimm < 4)
umull2 Vn.H8, Vm.B16, Va.B16
umull2 Vn.S4, Vm.H8, Va.H8
umull2 Vn.D2, Vm.S4, Va.S4
uqadd Bn, Bm, Ba
uqadd Hn, Hm, Ha
uqadd Sn, Sm, Sa
uqadd Dn, Dm, Da
uqadd Vn.B16, Vm.B16, Va.B16
uqadd Vn.B8, Vm.B8, Va.B8
uqadd Vn.H8, Vm.H8, Va.H8
uqadd Vn.H4, Vm.H4, Va.H4
uqadd Vn.S4, Vm.S4, Va.S4
uqadd Vn.S2, Vm.S2, Va.S2
uqadd Vn.D2, Vm.D2, Va.D2
uqrshl Bn, Bm, Ba
uqrshl Hn, Hm, Ha
uqrshl Sn, Sm, Sa
uqrshl Dn, Dm, Da
uqrshl Vn.B16, Vm.B16, Va.B16
uqrshl Vn.B8, Vm.B8, Va.B8
uqrshl Vn.H8, Vm.H8, Va.H8
uqrshl Vn.H4, Vm.H4, Va.H4
uqrshl Vn.S4, Vm.S4, Va.S4
uqrshl Vn.S2, Vm.S2, Va.S2
uqrshl Vn.D2, Vm.D2, Va.D2
uqrshrn Bn, Hm, #uimm (1 <= #uimm <= 8)
uqrshrn Hn, Sm, #uimm (1 <= #uimm <= 16)
uqrshrn Sn, Dm, #uimm (1 <= #uimm <= 32)
uqrshrn Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
uqrshrn Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
uqrshrn Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
uqrshrn2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
uqrshrn2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
uqrshrn2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
uqshl Bn, Bm, #uimm (#uimm < 8)
uqshl Hn, Hm, #uimm (#uimm < 16)
uqshl Sn, Sm, #uimm (#uimm < 32)
uqshl Dn, Dm, #uimm (#uimm < 64)
uqshl Vn.B16, Vm.B16, #uimm (#uimm < 8)
uqshl Vn.B8, Vm.B8, #uimm (#uimm < 8)
uqshl Vn.H8, Vm.H8, #uimm (#uimm < 16)
uqshl Vn.H4, Vm.H4, #uimm (#uimm < 16)
uqshl Vn.S4, Vm.S4, #uimm (#uimm < 32)
uqshl Vn.S2, Vm.S2, #uimm (#uimm < 32)
uqshl Vn.D2, Vm.D2, #uimm (#uimm < 64)
uqshl Bn, Bm, Ba
uqshl Hn, Hm, Ha
uqshl Sn, Sm, Sa
uqshl Dn, Dm, Da
uqshl Vn.B16, Vm.B16, Va.B16
uqshl Vn.B8, Vm.B8, Va.B8
uqshl Vn.H8, Vm.H8, Va.H8
uqshl Vn.H4, Vm.H4, Va.H4
uqshl Vn.S4, Vm.S4, Va.S4
uqshl Vn.S2, Vm.S2, Va.S2
uqshl Vn.D2, Vm.D2, Va.D2
uqshrn Bn, Hm, #uimm (1 <= #uimm <= 8)
uqshrn Hn, Sm, #uimm (1 <= #uimm <= 16)
uqshrn Sn, Dm, #uimm (1 <= #uimm <= 32)
uqshrn Vn.B8, Vm.H8, #uimm (1 <= #uimm <= 8)
uqshrn Vn.H4, Vm.S4, #uimm (1 <= #uimm <= 16)
uqshrn Vn.S2, Vm.D2, #uimm (1 <= #uimm <= 32)
uqshrn2 Vn.B16, Vm.H8, #uimm (1 <= #uimm <= 8)
uqshrn2 Vn.H8, Vm.S4, #uimm (1 <= #uimm <= 16)
uqshrn2 Vn.S4, Vm.D2, #uimm (1 <= #uimm <= 32)
uqsub Bn, Bm, Ba
uqsub Hn, Hm, Ha
uqsub Sn, Sm, Sa
uqsub Dn, Dm, Da
uqsub Vn.B16, Vm.B16, Va.B16
uqsub Vn.B8, Vm.B8, Va.B8
uqsub Vn.H8, Vm.H8, Va.H8
uqsub Vn.H4, Vm.H4, Va.H4
uqsub Vn.S4, Vm.S4, Va.S4
uqsub Vn.S2, Vm.S2, Va.S2
uqsub Vn.D2, Vm.D2, Va.D2
uqxtn Bn, Hm
uqxtn Hn, Sm
uqxtn Sn, Dm
uqxtn Vn.B8, Vm.H8
uqxtn Vn.H4, Vm.S4
uqxtn Vn.S2, Vm.D2
uqxtn2 Vn.B16, Vm.H8
uqxtn2 Vn.H8, Vm.S4
uqxtn2 Vn.S4, Vm.D2
urecpe Vn.S4, Vm.S4
urecpe Vn.S2, Vm.S2
urhadd Vn.B16, Vm.B16, Va.B16
urhadd Vn.B8, Vm.B8, Va.B8
urhadd Vn.H8, Vm.H8, Va.H8
urhadd Vn.H4, Vm.H4, Va.H4
urhadd Vn.S4, Vm.S4, Va.S4
urhadd Vn.S2, Vm.S2, Va.S2
urshl Dn, Dm, Da
urshl Vn.B16, Vm.B16, Va.B16
urshl Vn.B8, Vm.B8, Va.B8
urshl Vn.H8, Vm.H8, Va.H8
urshl Vn.H4, Vm.H4, Va.H4
urshl Vn.S4, Vm.S4, Va.S4
urshl Vn.S2, Vm.S2, Va.S2
urshl Vn.D2, Vm.D2, Va.D2
urshr Dn, Dm, #uimm (1 <= #uimm <= 64)
urshr Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
urshr Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
urshr Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
urshr Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
urshr Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
urshr Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
urshr Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
ursqrte Vn.S4, Vm.S4
ursqrte Vn.S2, Vm.S2
ursra Dn, Dm, #uimm (1 <= #uimm <= 64)
ursra Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
ursra Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
ursra Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
ursra Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
ursra Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
ursra Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
ursra Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
ushl Dn, Dm, Da
ushl Vn.B16, Vm.B16, Va.B16
ushl Vn.B8, Vm.B8, Va.B8
ushl Vn.H8, Vm.H8, Va.H8
ushl Vn.H4, Vm.H4, Va.H4
ushl Vn.S4, Vm.S4, Va.S4
ushl Vn.S2, Vm.S2, Va.S2
ushl Vn.D2, Vm.D2, Va.D2
ushll Vn.H8, Vm.B8, #uimm (#uimm < 8)
ushll Vn.S4, Vm.H4, #uimm (#uimm < 16)
ushll Vn.D2, Vm.S2, #uimm (#uimm < 32)
ushll2 Vn.H8, Vm.B16, #uimm (#uimm < 8)
ushll2 Vn.S4, Vm.H8, #uimm (#uimm < 16)
ushll2 Vn.D2, Vm.S4, #uimm (#uimm < 32)
ushr Dn, Dm, #uimm (1 <= #uimm <= 64)
ushr Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
ushr Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
ushr Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
ushr Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
ushr Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
ushr Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
ushr Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
usqadd Bn, Bm
usqadd Hn, Hm
usqadd Sn, Sm
usqadd Dn, Dm
usqadd Vn.B16, Vm.B16
usqadd Vn.B8, Vm.B8
usqadd Vn.H8, Vm.H8
usqadd Vn.H4, Vm.H4
usqadd Vn.S4, Vm.S4
usqadd Vn.S2, Vm.S2
usqadd Vn.D2, Vm.D2
usra Dn, Dm, #uimm (1 <= #uimm <= 64)
usra Vn.B16, Vm.B16, #uimm (1 <= #uimm <= 8)
usra Vn.B8, Vm.B8, #uimm (1 <= #uimm <= 8)
usra Vn.H8, Vm.H8, #uimm (1 <= #uimm <= 16)
usra Vn.H4, Vm.H4, #uimm (1 <= #uimm <= 16)
usra Vn.S4, Vm.S4, #uimm (1 <= #uimm <= 32)
usra Vn.S2, Vm.S2, #uimm (1 <= #uimm <= 32)
usra Vn.D2, Vm.D2, #uimm (1 <= #uimm <= 64)
usubl Vn.H8, Vm.B8, Va.B8
usubl Vn.S4, Vm.H4, Va.H4
usubl Vn.D2, Vm.S2, Va.S2
usubl2 Vn.H8, Vm.B16, Va.B16
usubl2 Vn.S4, Vm.H8, Va.H8
usubl2 Vn.D2, Vm.S4, Va.S4
usubw Vn.H8, Vm.H8, Va.B8
usubw Vn.S4, Vm.S4, Va.H4
usubw Vn.D2, Vm.D2, Va.S2
usubw2 Vn.H8, Vm.H8, Va.B16
usubw2 Vn.S4, Vm.S4, Va.H8
usubw2 Vn.D2, Vm.D2, Va.S4
uxtl Vn.H8, Vm.B8
uxtl Vn.S4, Vm.H4
uxtl Vn.D2, Vm.S2
uxtl2 Vn.H8, Vm.B16
uxtl2 Vn.S4, Vm.H8
uxtl2 Vn.D2, Vm.S4
uzp1 Vn.B16, Vm.B16, Va.B16
uzp1 Vn.B8, Vm.B8, Va.B8
uzp1 Vn.H8, Vm.H8, Va.H8
uzp1 Vn.H4, Vm.H4, Va.H4
uzp1 Vn.S4, Vm.S4, Va.S4
uzp1 Vn.S2, Vm.S2, Va.S2
uzp1 Vn.D2, Vm.D2, Va.D2
uzp2 Vn.B16, Vm.B16, Va.B16
uzp2 Vn.B8, Vm.B8, Va.B8
uzp2 Vn.H8, Vm.H8, Va.H8
uzp2 Vn.H4, Vm.H4, Va.H4
uzp2 Vn.S4, Vm.S4, Va.S4
uzp2 Vn.S2, Vm.S2, Va.S2
uzp2 Vn.D2, Vm.D2, Va.D2
xar Vn.D2, Vm.D2, Va.D2, #uimm (#uimm < 64)
xtn Vn.B8, Vm.H8
xtn Vn.H4, Vm.S4
xtn Vn.S2, Vm.D2
xtn2 Vn.B16, Vm.H8
xtn2 Vn.H8, Vm.S4
xtn2 Vn.S4, Vm.D2
zip1 Vn.B16, Vm.B16, Va.B16
zip1 Vn.B8, Vm.B8, Va.B8
zip1 Vn.H8, Vm.H8, Va.H8
zip1 Vn.H4, Vm.H4, Va.H4
zip1 Vn.S4, Vm.S4, Va.S4
zip1 Vn.S2, Vm.S2, Va.S2
zip1 Vn.D2, Vm.D2, Va.D2
zip2 Vn.B16, Vm.B16, Va.B16
zip2 Vn.B8, Vm.B8, Va.B8
zip2 Vn.H8, Vm.H8, Va.H8
zip2 Vn.H4, Vm.H4, Va.H4
zip2 Vn.S4, Vm.S4, Va.S4
zip2 Vn.S2, Vm.S2, Va.S2
zip2 Vn.D2, Vm.D2, Va.D2
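
Example (register choices are arbitrary): zip1/zip2 interleave two vectors lane by lane, with uzp1/uzp2 above performing the inverse de-interleave.

    zip1 V0.B16, V2.B16, V3.B16   // interleave the low halves of V2 and V3
    zip2 V1.B16, V2.B16, V3.B16   // interleave the high halves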