Instruction Reference

abs

abs Dn, Dm
abs Vn.B16, Vm.B16
abs Vn.B8, Vm.B8
abs Vn.H8, Vm.H8
abs Vn.H4, Vm.H4
abs Vn.S4, Vm.S4
abs Vn.S2, Vm.S2
abs Vn.D2, Vm.D2

adc

adc Wn, Wm, Wa
adc Xn, Xm, Xa

adcs

adcs Wn, Wm, Wa
adcs Xn, Xm, Xa

add

add Wn, Wm, Wa {, LSL|LSR|ASR #uimm }                                               (#uimm < 32)
add Xn, Xm, Xa {, LSL|LSR|ASR #uimm }                                               (#uimm < 64)
add Wn|WSP, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm }                      (0 <= #uimm <= 4)
add Xn|SP, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } }                          (0 <= #uimm <= 4)
add Xn|SP, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm }                                  (0 <= #uimm <= 4)
add Wn|WSP, Wm|WSP, #uimm {, LSL #uimm1 }                       (#uimm < 4096, #uimm1 = [0, 12])
add Xn|SP, Xm|SP, #uimm {, LSL #uimm1 }                         (#uimm < 4096, #uimm1 = [0, 12])
add Dn, Dm, Da
add Vn.B16, Vm.B16, Va.B16
add Vn.B8, Vm.B8, Va.B8
add Vn.H8, Vm.H8, Va.H8
add Vn.H4, Vm.H4, Va.H4
add Vn.S4, Vm.S4, Va.S4
add Vn.S2, Vm.S2, Va.S2
add Vn.D2, Vm.D2, Va.D2
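
For example, these are concrete instances of the shifted-register, extended-register, and immediate forms above (register numbers chosen arbitrarily):

    add W0, W1, W2, LSL #4              // shifted-register form
    add X0, SP, W1, UXTW #2             // extended-register form: W source, X destination
    add SP, SP, #16                     // immediate form
    add X0, X1, #1, LSL #12             // immediate form, shifted into bits 12-23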

addhn

addhn Vn.B8, Vm.H8, Va.H8
addhn Vn.H4, Vm.S4, Va.S4
addhn Vn.S2, Vm.D2, Va.D2

addhn2

addhn2 Vn.B16, Vm.H8, Va.H8
addhn2 Vn.H8, Vm.S4, Va.S4
addhn2 Vn.S4, Vm.D2, Va.D2
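
addhn adds each element pair at full width and keeps only the high half of every result, narrowing the element size; addhn2 computes the same values but writes them to the upper half of the destination. For example:

    addhn  V0.B8, V1.H8, V2.H8          // each byte = high 8 bits of a 16-bit sum
    addhn2 V0.B16, V3.H8, V4.H8         // same sums, written to the upper 8 bytes of V0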

addp

addp Dn, Vm.D2
addp Vn.B16, Vm.B16, Va.B16
addp Vn.B8, Vm.B8, Va.B8
addp Vn.H8, Vm.H8, Va.H8
addp Vn.H4, Vm.H4, Va.H4
addp Vn.S4, Vm.S4, Va.S4
addp Vn.S2, Vm.S2, Va.S2
addp Vn.D2, Vm.D2, Va.D2

adds

adds Wn, Wm, Wa {, LSL|LSR|ASR #uimm }                                              (#uimm < 32)
adds Xn, Xm, Xa {, LSL|LSR|ASR #uimm }                                              (#uimm < 64)
adds Wn, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm }                         (0 <= #uimm <= 4)
adds Xn, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } }                            (0 <= #uimm <= 4)
adds Xn, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm }                                    (0 <= #uimm <= 4)
adds Wn, Wm|WSP, #uimm {, LSL #uimm1 }                          (#uimm < 4096, #uimm1 = [0, 12])
adds Xn, Xm|SP, #uimm {, LSL #uimm1 }                           (#uimm < 4096, #uimm1 = [0, 12])

addv

addv Bn, Vm.B16
addv Bn, Vm.B8
addv Hn, Vm.H8
addv Hn, Vm.H4
addv Sn, Vm.S4

adr

adr Xn, <offset>                                                              (offset is 21 bit)

adrp

adrp Xn, <offset>                                            (offset is 21 bit, 4K-page aligned)
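
adr materializes a PC-relative address within ±1 MiB; adrp materializes the 4 KiB page of an address within ±4 GiB and is usually paired with an add of the low 12 bits. A common idiom (GNU as syntax; symbol is a placeholder label, and the :lo12: relocation operator may be spelled differently in other assemblers):

    adrp X0, symbol                     // X0 = 4 KiB page containing symbol
    add  X0, X0, :lo12:symbol           // add the low 12 bits of symbol's address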

aesd

aesd Vn.B16, Vm.B16

aese

aese Vn.B16, Vm.B16

aesimc

aesimc Vn.B16, Vm.B16

aesmc

aesmc Vn.B16, Vm.B16

and

and Vn.B16, Vm.B16, Va.B16
and Vn.B8, Vm.B8, Va.B8
and Wn|WSP, Wm, #imm                                               (#imm is a logical immediate)
and Xn|SP, Xm, #imm                                                (#imm is a logical immediate)
and Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 32)
and Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 64)
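
A logical immediate is a pattern formed by replicating a rotated run of ones across the register; arbitrary constants are not encodable. For example:

    and X0, X1, #0xFF                   // encodable: a run of 8 ones
    and X0, X1, #0x5555555555555555     // encodable: a replicated 01 pattern
    // and X0, X1, #0 is not encodable: 0 and all-ones are never logical immediates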

ands

ands Wn, Wm, #imm                                                  (#imm is a logical immediate)
ands Xn, Xm, #imm                                                  (#imm is a logical immediate)
ands Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                          (#uimm < 32)
ands Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                          (#uimm < 64)

asr

asr Wn, Wm, Wa
asr Xn, Xm, Xa
asr Wn, Wm, #uimm                                                                   (#uimm < 32)
asr Xn, Xm, #uimm                                                                   (#uimm < 64)

asrv

asrv Wn, Wm, Wa
asrv Xn, Xm, Xa

at

at at_op, Xn

autda

autda Xn, Xm|SP

autdb

autdb Xn, Xm|SP

autdza

autdza Xn

autdzb

autdzb Xn

autia

autia Xn, Xm|SP

autia1716

autia1716

autiasp

autiasp

autiaz

autiaz

autib

autib Xn, Xm|SP

autib1716

autib1716

autibsp

autibsp

autibz

autibz

autiza

autiza Xn

autizb

autizb Xn

b

b.<cond> <offset>                                             (offset is 19 bit, 4-byte aligned)
b <offset>                                                    (offset is 26 bit, 4-byte aligned)

bcax

bcax Vn.B16, Vm.B16, Va.B16, Vb.B16

bfc

bfc Wn, #uimm, #uimm1                                (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
bfc Xn, #uimm, #uimm1                                (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)

bfi

bfi Wn, Wm, #uimm, #uimm1                            (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
bfi Xn, Xm, #uimm, #uimm1                            (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)

bfm

bfm Wn, Wm, #uimm, #uimm1                                              (#uimm < 32, #uimm1 < 32)
bfm Xn, Xm, #uimm, #uimm1                                              (#uimm < 64, #uimm1 < 64)

bfxil

bfxil Wn, Wm, #uimm, #uimm1                               (#uimm < 32, 1 <= #uimm1 <= 32 - uimm)
bfxil Xn, Xm, #uimm, #uimm1                               (#uimm < 64, 1 <= #uimm1 <= 64 - uimm)
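
In bfc, bfi, and bfxil the first immediate is a bit position (the lsb) and the second is a field width. For example:

    bfi   W0, W1, #8, #4                // copy W1 bits 0-3 into W0 bits 8-11
    bfxil W0, W1, #8, #4                // copy W1 bits 8-11 into W0 bits 0-3
    bfc   W0, #8, #4                    // clear W0 bits 8-11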

bic

bic Vn.H8, #uimm {, LSL #uimm1 }                                  (#uimm < 256, #uimm1 = [0, 8])
bic Vn.H4, #uimm {, LSL #uimm1 }                                  (#uimm < 256, #uimm1 = [0, 8])
bic Vn.S4, #uimm {, LSL #uimm1 }                          (#uimm < 256, #uimm1 = [0, 8, 16, 24])
bic Vn.S2, #uimm {, LSL #uimm1 }                          (#uimm < 256, #uimm1 = [0, 8, 16, 24])
bic Vn.B16, Vm.B16, Va.B16
bic Vn.B8, Vm.B8, Va.B8
bic Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 32)
bic Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 64)

bics

bics Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                          (#uimm < 32)
bics Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                          (#uimm < 64)

bif

bif Vn.B16, Vm.B16, Va.B16
bif Vn.B8, Vm.B8, Va.B8

bit

bit Vn.B16, Vm.B16, Va.B16
bit Vn.B8, Vm.B8, Va.B8

bl

bl <offset>                                                   (offset is 26 bit, 4-byte aligned)

blr

blr Xn

blraa

blraa Xn, Xm|SP

blraaz

blraaz Xn

blrab

blrab Xn, Xm|SP

blrabz

blrabz Xn

br

br Xn

braa

braa Xn, Xm|SP

braaz

braaz Xn

brab

brab Xn, Xm|SP

brabz

brabz Xn

brk

brk #uimm                                                                        (#uimm < 65536)

bsl

bsl Vn.B16, Vm.B16, Va.B16
bsl Vn.B8, Vm.B8, Va.B8

cas

cas Wn, Wm, [Xa|SP]
cas Xn, Xm, [Xa|SP]

casa

casa Wn, Wm, [Xa|SP]
casa Xn, Xm, [Xa|SP]

casab

casab Wn, Wm, [Xa|SP]

casah

casah Wn, Wm, [Xa|SP]

casal

casal Wn, Wm, [Xa|SP]
casal Xn, Xm, [Xa|SP]

casalb

casalb Wn, Wm, [Xa|SP]

casalh

casalh Wn, Wm, [Xa|SP]

casb

casb Wn, Wm, [Xa|SP]

cash

cash Wn, Wm, [Xa|SP]

casl

casl Wn, Wm, [Xa|SP]
casl Xn, Xm, [Xa|SP]

caslb

caslb Wn, Wm, [Xa|SP]

caslh

caslh Wn, Wm, [Xa|SP]

casp

casp Wn, Wn+1, Wm, Wm+1, [Xa|SP]                                          (n is even, m is even)
casp Xn, Xn+1, Xm, Xm+1, [Xa|SP]                                          (n is even, m is even)

caspa

caspa Wn, Wn+1, Wm, Wm+1, [Xa|SP]                                         (n is even, m is even)
caspa Xn, Xn+1, Xm, Xm+1, [Xa|SP]                                         (n is even, m is even)

caspal

caspal Wn, Wn+1, Wm, Wm+1, [Xa|SP]                                        (n is even, m is even)
caspal Xn, Xn+1, Xm, Xm+1, [Xa|SP]                                        (n is even, m is even)

caspl

caspl Wn, Wn+1, Wm, Wm+1, [Xa|SP]                                         (n is even, m is even)
caspl Xn, Xn+1, Xm, Xm+1, [Xa|SP]                                         (n is even, m is even)
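
The casp family operates on register pairs: Wn+1/Xn+1 denote the registers numbered one above Wn/Xn, which is why both pair bases must be even. The first pair is compared against the memory pair and receives its old value; the second pair is stored on a match. For example:

    casp   W0, W1, W2, W3, [X4]         // if the pair at [X4] == {W0,W1}, store {W2,W3}
    caspal X4, X5, X6, X7, [SP]         // 64-bit pair with acquire-release ordering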

cbnz

cbnz Wn, <offset>                                             (offset is 19 bit, 4-byte aligned)
cbnz Xn, <offset>                                             (offset is 19 bit, 4-byte aligned)

cbz

cbz Wn, <offset>                                              (offset is 19 bit, 4-byte aligned)
cbz Xn, <offset>                                              (offset is 19 bit, 4-byte aligned)

ccmn

ccmn Wn, #uimm, #uimm1, <cond>                                         (#uimm < 32, #uimm1 < 16)
ccmn Xn, #uimm, #uimm1, <cond>                                         (#uimm < 32, #uimm1 < 16)
ccmn Wn, Wm, #uimm, <cond>                                                          (#uimm < 16)
ccmn Xn, Xm, #uimm, <cond>                                                          (#uimm < 16)

ccmp

ccmp Wn, #uimm, #uimm1, <cond>                                         (#uimm < 32, #uimm1 < 16)
ccmp Xn, #uimm, #uimm1, <cond>                                         (#uimm < 32, #uimm1 < 16)
ccmp Wn, Wm, #uimm, <cond>                                                          (#uimm < 16)
ccmp Xn, Xm, #uimm, <cond>                                                          (#uimm < 16)
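
For ccmn and ccmp the final immediate is an NZCV value: if <cond> holds, the flags are set by the comparison; otherwise they are set directly to that immediate. This chains comparisons without branches (the branch target below is a placeholder label):

    cmp  W0, #0
    ccmp W1, #3, #0, ne                 // if W0 != 0: flags = cmp(W1, 3); else NZCV = 0
    b.eq both_conditions_met            // taken only if W0 != 0 and W1 == 3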

cfinv

cfinv

cfp

cfp rctx, Xn

cinc

cinc Wn, Wm, <cond>
cinc Xn, Xm, <cond>

cinv

cinv Wn, Wm, <cond>
cinv Xn, Xm, <cond>

clrex

clrex #uimm                                                                         (#uimm < 16)
clrex

cls

cls Vn.B16, Vm.B16
cls Vn.B8, Vm.B8
cls Vn.H8, Vm.H8
cls Vn.H4, Vm.H4
cls Vn.S4, Vm.S4
cls Vn.S2, Vm.S2
cls Wn, Wm
cls Xn, Xm

clz

clz Vn.B16, Vm.B16
clz Vn.B8, Vm.B8
clz Vn.H8, Vm.H8
clz Vn.H4, Vm.H4
clz Vn.S4, Vm.S4
clz Vn.S2, Vm.S2
clz Wn, Wm
clz Xn, Xm

cmeq

cmeq Dn, Dm, Da
cmeq Vn.B16, Vm.B16, Va.B16
cmeq Vn.B8, Vm.B8, Va.B8
cmeq Vn.H8, Vm.H8, Va.H8
cmeq Vn.H4, Vm.H4, Va.H4
cmeq Vn.S4, Vm.S4, Va.S4
cmeq Vn.S2, Vm.S2, Va.S2
cmeq Vn.D2, Vm.D2, Va.D2
cmeq Dn, Dm, 0
cmeq Vn.B16, Vm.B16, 0
cmeq Vn.B8, Vm.B8, 0
cmeq Vn.H8, Vm.H8, 0
cmeq Vn.H4, Vm.H4, 0
cmeq Vn.S4, Vm.S4, 0
cmeq Vn.S2, Vm.S2, 0
cmeq Vn.D2, Vm.D2, 0

cmge

cmge Dn, Dm, Da
cmge Vn.B16, Vm.B16, Va.B16
cmge Vn.B8, Vm.B8, Va.B8
cmge Vn.H8, Vm.H8, Va.H8
cmge Vn.H4, Vm.H4, Va.H4
cmge Vn.S4, Vm.S4, Va.S4
cmge Vn.S2, Vm.S2, Va.S2
cmge Vn.D2, Vm.D2, Va.D2
cmge Dn, Dm, 0
cmge Vn.B16, Vm.B16, 0
cmge Vn.B8, Vm.B8, 0
cmge Vn.H8, Vm.H8, 0
cmge Vn.H4, Vm.H4, 0
cmge Vn.S4, Vm.S4, 0
cmge Vn.S2, Vm.S2, 0
cmge Vn.D2, Vm.D2, 0

cmgt

cmgt Dn, Dm, Da
cmgt Vn.B16, Vm.B16, Va.B16
cmgt Vn.B8, Vm.B8, Va.B8
cmgt Vn.H8, Vm.H8, Va.H8
cmgt Vn.H4, Vm.H4, Va.H4
cmgt Vn.S4, Vm.S4, Va.S4
cmgt Vn.S2, Vm.S2, Va.S2
cmgt Vn.D2, Vm.D2, Va.D2
cmgt Dn, Dm, 0
cmgt Vn.B16, Vm.B16, 0
cmgt Vn.B8, Vm.B8, 0
cmgt Vn.H8, Vm.H8, 0
cmgt Vn.H4, Vm.H4, 0
cmgt Vn.S4, Vm.S4, 0
cmgt Vn.S2, Vm.S2, 0
cmgt Vn.D2, Vm.D2, 0

cmhi

cmhi Dn, Dm, Da
cmhi Vn.B16, Vm.B16, Va.B16
cmhi Vn.B8, Vm.B8, Va.B8
cmhi Vn.H8, Vm.H8, Va.H8
cmhi Vn.H4, Vm.H4, Va.H4
cmhi Vn.S4, Vm.S4, Va.S4
cmhi Vn.S2, Vm.S2, Va.S2
cmhi Vn.D2, Vm.D2, Va.D2

cmhs

cmhs Dn, Dm, Da
cmhs Vn.B16, Vm.B16, Va.B16
cmhs Vn.B8, Vm.B8, Va.B8
cmhs Vn.H8, Vm.H8, Va.H8
cmhs Vn.H4, Vm.H4, Va.H4
cmhs Vn.S4, Vm.S4, Va.S4
cmhs Vn.S2, Vm.S2, Va.S2
cmhs Vn.D2, Vm.D2, Va.D2

cmle

cmle Dn, Dm, 0
cmle Vn.B16, Vm.B16, 0
cmle Vn.B8, Vm.B8, 0
cmle Vn.H8, Vm.H8, 0
cmle Vn.H4, Vm.H4, 0
cmle Vn.S4, Vm.S4, 0
cmle Vn.S2, Vm.S2, 0
cmle Vn.D2, Vm.D2, 0

cmlt

cmlt Dn, Dm, 0
cmlt Vn.B16, Vm.B16, 0
cmlt Vn.B8, Vm.B8, 0
cmlt Vn.H8, Vm.H8, 0
cmlt Vn.H4, Vm.H4, 0
cmlt Vn.S4, Vm.S4, 0
cmlt Vn.S2, Vm.S2, 0
cmlt Vn.D2, Vm.D2, 0

cmn

cmn Wn, Wm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 32)
cmn Xn, Xm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 64)
cmn Wn|WSP, Wm {, LSL|UXT[BHWX]|SXT[BHWX] #uimm }                              (0 <= #uimm <= 4)
cmn Xn|SP, Wm, UXT[BHW]|SXT[BHW] { #uimm }                                     (0 <= #uimm <= 4)
cmn Xn|SP, Xm {, LSL|UXTX|SXTX #uimm }                                         (0 <= #uimm <= 4)
cmn Wn|WSP, #uimm {, LSL #uimm1 }                               (#uimm < 4096, #uimm1 = [0, 12])
cmn Xn|SP, #uimm {, LSL #uimm1 }                                (#uimm < 4096, #uimm1 = [0, 12])

cmp

cmp Wn, Wm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 32)
cmp Xn, Xm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 64)
cmp Wn|WSP, Wm {, LSL|UXT[BHWX]|SXT[BHWX] #uimm }                              (0 <= #uimm <= 4)
cmp Xn|SP, Wm, UXT[BHW]|SXT[BHW] { #uimm }                                     (0 <= #uimm <= 4)
cmp Xn|SP, Xm {, LSL|UXTX|SXTX #uimm }                                         (0 <= #uimm <= 4)
cmp Wn|WSP, #uimm {, LSL #uimm1 }                               (#uimm < 4096, #uimm1 = [0, 12])
cmp Xn|SP, #uimm {, LSL #uimm1 }                                (#uimm < 4096, #uimm1 = [0, 12])

cmtst

cmtst Dn, Dm, Da
cmtst Vn.B16, Vm.B16, Va.B16
cmtst Vn.B8, Vm.B8, Va.B8
cmtst Vn.H8, Vm.H8, Va.H8
cmtst Vn.H4, Vm.H4, Va.H4
cmtst Vn.S4, Vm.S4, Va.S4
cmtst Vn.S2, Vm.S2, Va.S2
cmtst Vn.D2, Vm.D2, Va.D2

cneg

cneg Wn, Wm, <cond>
cneg Xn, Xm, <cond>

cnt

cnt Vn.B16, Vm.B16
cnt Vn.B8, Vm.B8

cpp

cpp rctx, Xn

crc32b

crc32b Wn, Wm, Wa

crc32cb

crc32cb Wn, Wm, Wa

crc32ch

crc32ch Wn, Wm, Wa

crc32cw

crc32cw Wn, Wm, Wa

crc32cx

crc32cx Wn, Wm, Xa

crc32h

crc32h Wn, Wm, Wa

crc32w

crc32w Wn, Wm, Wa

crc32x

crc32x Wn, Wm, Xa

csdb

csdb

csel

csel Wn, Wm, Wa, <cond>
csel Xn, Xm, Xa, <cond>
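
csel picks one of two registers based on the current flags, typically set by a preceding compare. For example:

    cmp  W1, W2
    csel W0, W1, W2, GT                 // W0 = signed max(W1, W2)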

cset

cset Wn, <cond>
cset Xn, <cond>

csetm

csetm Wn, <cond>
csetm Xn, <cond>

csinc

csinc Wn, Wm, Wa, <cond>
csinc Xn, Xm, Xa, <cond>

csinv

csinv Wn, Wm, Wa, <cond>
csinv Xn, Xm, Xa, <cond>

csneg

csneg Wn, Wm, Wa, <cond>
csneg Xn, Xm, Xa, <cond>

dc

dc dc_op, Xn

dcps1

dcps1 { #uimm }                                                                  (#uimm < 65536)

dcps2

dcps2 { #uimm }                                                                  (#uimm < 65536)

dcps3

dcps3 { #uimm }                                                                  (#uimm < 65536)

dmb

dmb barrier_op
dmb #uimm                                                                           (#uimm < 16)

drps

drps

dsb

dsb barrier_op
dsb #uimm                                                                           (#uimm < 16)

dup

dup Bn, Vm.B[uimm]                                                                  (#uimm < 16)
dup Hn, Vm.H[uimm]                                                                   (#uimm < 8)
dup Sn, Vm.S[uimm]                                                                   (#uimm < 4)
dup Dn, Vm.D[uimm]                                                                   (#uimm < 2)
dup Vn.B16, Vm.B[uimm]                                                              (#uimm < 16)
dup Vn.B8, Vm.B[uimm]                                                               (#uimm < 16)
dup Vn.H8, Vm.H[uimm]                                                                (#uimm < 8)
dup Vn.H4, Vm.H[uimm]                                                                (#uimm < 8)
dup Vn.S4, Vm.S[uimm]                                                                (#uimm < 4)
dup Vn.S2, Vm.S[uimm]                                                                (#uimm < 4)
dup Vn.D2, Vm.D[uimm]                                                                (#uimm < 2)
dup Vn.B16, Wm
dup Vn.B8, Wm
dup Vn.H8, Wm
dup Vn.H4, Wm
dup Vn.S4, Wm
dup Vn.S2, Wm
dup Vn.D2, Xm
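
dup broadcasts either a single vector lane or a general-purpose register into every element of the destination. For example:

    dup V0.S4, V1.S[2]                  // replicate lane 2 of V1 into all four lanes
    dup V0.B16, W3                      // replicate the low byte of W3 into all 16 lanes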

dvp

dvp rctx, Xn

eon

eon Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 32)
eon Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 64)

eor

eor Vn.B16, Vm.B16, Va.B16
eor Vn.B8, Vm.B8, Va.B8
eor Wn|WSP, Wm, #imm                                               (#imm is a logical immediate)
eor Xn|SP, Xm, #imm                                                (#imm is a logical immediate)
eor Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 32)
eor Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 64)

eor3

eor3 Vn.B16, Vm.B16, Va.B16, Vb.B16

eret

eret

eretaa

eretaa

eretab

eretab

esb

esb

ext

ext Vn.B8, Vm.B8, Va.B8, #uimm                                                       (#uimm < 8)
ext Vn.B16, Vm.B16, Va.B16, #uimm                                                   (#uimm < 16)
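
ext concatenates the two sources and extracts a vector's worth of bytes starting at the given byte offset, with the low bytes taken from the first source. For example:

    ext V0.B16, V1.B16, V2.B16, #3      // V0 = bytes 3-15 of V1, then bytes 0-2 of V2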

extr

extr Wn, Wm, Wa, #uimm                                                              (#uimm < 32)
extr Xn, Xm, Xa, #uimm                                                              (#uimm < 64)

fabd

fabd Hn, Hm, Ha
fabd Sn, Sm, Sa
fabd Dn, Dm, Da
fabd Vn.H8, Vm.H8, Va.H8
fabd Vn.H4, Vm.H4, Va.H4
fabd Vn.S4, Vm.S4, Va.S4
fabd Vn.S2, Vm.S2, Va.S2
fabd Vn.D2, Vm.D2, Va.D2

fabs

fabs Vn.H8, Vm.H8
fabs Vn.H4, Vm.H4
fabs Vn.S4, Vm.S4
fabs Vn.S2, Vm.S2
fabs Vn.D2, Vm.D2
fabs Hn, Hm
fabs Sn, Sm
fabs Dn, Dm

facge

facge Hn, Hm, Ha
facge Sn, Sm, Sa
facge Dn, Dm, Da
facge Vn.H8, Vm.H8, Va.H8
facge Vn.H4, Vm.H4, Va.H4
facge Vn.S4, Vm.S4, Va.S4
facge Vn.S2, Vm.S2, Va.S2
facge Vn.D2, Vm.D2, Va.D2

facgt

facgt Hn, Hm, Ha
facgt Sn, Sm, Sa
facgt Dn, Dm, Da
facgt Vn.H8, Vm.H8, Va.H8
facgt Vn.H4, Vm.H4, Va.H4
facgt Vn.S4, Vm.S4, Va.S4
facgt Vn.S2, Vm.S2, Va.S2
facgt Vn.D2, Vm.D2, Va.D2

fadd

fadd Vn.H8, Vm.H8, Va.H8
fadd Vn.H4, Vm.H4, Va.H4
fadd Vn.S4, Vm.S4, Va.S4
fadd Vn.S2, Vm.S2, Va.S2
fadd Vn.D2, Vm.D2, Va.D2
fadd Hn, Hm, Ha
fadd Sn, Sm, Sa
fadd Dn, Dm, Da

faddp

faddp Hn, Vm.H2
faddp Sn, Vm.S2
faddp Dn, Vm.D2
faddp Vn.H8, Vm.H8, Va.H8
faddp Vn.H4, Vm.H4, Va.H4
faddp Vn.S4, Vm.S4, Va.S4
faddp Vn.S2, Vm.S2, Va.S2
faddp Vn.D2, Vm.D2, Va.D2

fcadd

fcadd Vn.H8, Vm.H8, Va.H8, #uimm                                             (#uimm = [90, 270])
fcadd Vn.H4, Vm.H4, Va.H4, #uimm                                             (#uimm = [90, 270])
fcadd Vn.S4, Vm.S4, Va.S4, #uimm                                             (#uimm = [90, 270])
fcadd Vn.S2, Vm.S2, Va.S2, #uimm                                             (#uimm = [90, 270])
fcadd Vn.D2, Vm.D2, Va.D2, #uimm                                             (#uimm = [90, 270])

fccmp

fccmp Hn, Hm, #uimm, <cond>                                                         (#uimm < 16)
fccmp Sn, Sm, #uimm, <cond>                                                         (#uimm < 16)
fccmp Dn, Dm, #uimm, <cond>                                                         (#uimm < 16)

fccmpe

fccmpe Hn, Hm, #uimm, <cond>                                                        (#uimm < 16)
fccmpe Sn, Sm, #uimm, <cond>                                                        (#uimm < 16)
fccmpe Dn, Dm, #uimm, <cond>                                                        (#uimm < 16)

fcmeq

fcmeq Hn, Hm, Ha
fcmeq Sn, Sm, Sa
fcmeq Dn, Dm, Da
fcmeq Vn.H8, Vm.H8, Va.H8
fcmeq Vn.H4, Vm.H4, Va.H4
fcmeq Vn.S4, Vm.S4, Va.S4
fcmeq Vn.S2, Vm.S2, Va.S2
fcmeq Vn.D2, Vm.D2, Va.D2
fcmeq Hn, Hm, 0
fcmeq Sn, Sm, 0
fcmeq Dn, Dm, 0
fcmeq Vn.H8, Vm.H8, 0
fcmeq Vn.H4, Vm.H4, 0
fcmeq Vn.S4, Vm.S4, 0
fcmeq Vn.S2, Vm.S2, 0
fcmeq Vn.D2, Vm.D2, 0

fcmge

fcmge Hn, Hm, Ha
fcmge Sn, Sm, Sa
fcmge Dn, Dm, Da
fcmge Vn.H8, Vm.H8, Va.H8
fcmge Vn.H4, Vm.H4, Va.H4
fcmge Vn.S4, Vm.S4, Va.S4
fcmge Vn.S2, Vm.S2, Va.S2
fcmge Vn.D2, Vm.D2, Va.D2
fcmge Hn, Hm, 0
fcmge Sn, Sm, 0
fcmge Dn, Dm, 0
fcmge Vn.H8, Vm.H8, 0
fcmge Vn.H4, Vm.H4, 0
fcmge Vn.S4, Vm.S4, 0
fcmge Vn.S2, Vm.S2, 0
fcmge Vn.D2, Vm.D2, 0

fcmgt

fcmgt Hn, Hm, Ha
fcmgt Sn, Sm, Sa
fcmgt Dn, Dm, Da
fcmgt Vn.H8, Vm.H8, Va.H8
fcmgt Vn.H4, Vm.H4, Va.H4
fcmgt Vn.S4, Vm.S4, Va.S4
fcmgt Vn.S2, Vm.S2, Va.S2
fcmgt Vn.D2, Vm.D2, Va.D2
fcmgt Hn, Hm, 0
fcmgt Sn, Sm, 0
fcmgt Dn, Dm, 0
fcmgt Vn.H8, Vm.H8, 0
fcmgt Vn.H4, Vm.H4, 0
fcmgt Vn.S4, Vm.S4, 0
fcmgt Vn.S2, Vm.S2, 0
fcmgt Vn.D2, Vm.D2, 0

fcmla

fcmla Vn.H4, Vm.H4, Va.H[uimm], #uimm1                   (#uimm < 2, #uimm1 = [0, 90, 180, 270])
fcmla Vn.H8, Vm.H8, Va.H[uimm], #uimm1                   (#uimm < 4, #uimm1 = [0, 90, 180, 270])
fcmla Vn.S4, Vm.S4, Va.S[uimm], #uimm1                   (#uimm < 2, #uimm1 = [0, 90, 180, 270])
fcmla Vn.H8, Vm.H8, Va.H8, #uimm                                     (#uimm = [0, 90, 180, 270])
fcmla Vn.H4, Vm.H4, Va.H4, #uimm                                     (#uimm = [0, 90, 180, 270])
fcmla Vn.S4, Vm.S4, Va.S4, #uimm                                     (#uimm = [0, 90, 180, 270])
fcmla Vn.S2, Vm.S2, Va.S2, #uimm                                     (#uimm = [0, 90, 180, 270])
fcmla Vn.D2, Vm.D2, Va.D2, #uimm                                     (#uimm = [0, 90, 180, 270])

fcmle

fcmle Hn, Hm, 0
fcmle Sn, Sm, 0
fcmle Dn, Dm, 0
fcmle Vn.H8, Vm.H8, 0
fcmle Vn.H4, Vm.H4, 0
fcmle Vn.S4, Vm.S4, 0
fcmle Vn.S2, Vm.S2, 0
fcmle Vn.D2, Vm.D2, 0

fcmlt

fcmlt Hn, Hm, 0
fcmlt Sn, Sm, 0
fcmlt Dn, Dm, 0
fcmlt Vn.H8, Vm.H8, 0
fcmlt Vn.H4, Vm.H4, 0
fcmlt Vn.S4, Vm.S4, 0
fcmlt Vn.S2, Vm.S2, 0
fcmlt Vn.D2, Vm.D2, 0

fcmp

fcmp Hn, Hm
fcmp Hn, 0
fcmp Sn, Sm
fcmp Sn, 0
fcmp Dn, Dm
fcmp Dn, 0

fcmpe

fcmpe Hn, Hm
fcmpe Hn, 0
fcmpe Sn, Sm
fcmpe Sn, 0
fcmpe Dn, Dm
fcmpe Dn, 0

fcsel

fcsel Hn, Hm, Ha, <cond>
fcsel Sn, Sm, Sa, <cond>
fcsel Dn, Dm, Da, <cond>

fcvt

fcvt Sn, Hm
fcvt Dn, Hm
fcvt Hn, Sm
fcvt Dn, Sm
fcvt Hn, Dm
fcvt Sn, Dm

fcvtas

fcvtas Hn, Hm
fcvtas Sn, Sm
fcvtas Dn, Dm
fcvtas Vn.H8, Vm.H8
fcvtas Vn.H4, Vm.H4
fcvtas Vn.S4, Vm.S4
fcvtas Vn.S2, Vm.S2
fcvtas Vn.D2, Vm.D2
fcvtas Wn, Hm
fcvtas Xn, Hm
fcvtas Wn, Sm
fcvtas Xn, Sm
fcvtas Wn, Dm
fcvtas Xn, Dm

fcvtau

fcvtau Hn, Hm
fcvtau Sn, Sm
fcvtau Dn, Dm
fcvtau Vn.H8, Vm.H8
fcvtau Vn.H4, Vm.H4
fcvtau Vn.S4, Vm.S4
fcvtau Vn.S2, Vm.S2
fcvtau Vn.D2, Vm.D2
fcvtau Wn, Hm
fcvtau Xn, Hm
fcvtau Wn, Sm
fcvtau Xn, Sm
fcvtau Wn, Dm
fcvtau Xn, Dm

fcvtl

fcvtl Vn.S4, Vm.H4
fcvtl Vn.D2, Vm.S2

fcvtl2

fcvtl2 Vn.S4, Vm.H8
fcvtl2 Vn.D2, Vm.S4

fcvtms

fcvtms Hn, Hm
fcvtms Sn, Sm
fcvtms Dn, Dm
fcvtms Vn.H8, Vm.H8
fcvtms Vn.H4, Vm.H4
fcvtms Vn.S4, Vm.S4
fcvtms Vn.S2, Vm.S2
fcvtms Vn.D2, Vm.D2
fcvtms Wn, Hm
fcvtms Xn, Hm
fcvtms Wn, Sm
fcvtms Xn, Sm
fcvtms Wn, Dm
fcvtms Xn, Dm

fcvtmu

fcvtmu Hn, Hm
fcvtmu Sn, Sm
fcvtmu Dn, Dm
fcvtmu Vn.H8, Vm.H8
fcvtmu Vn.H4, Vm.H4
fcvtmu Vn.S4, Vm.S4
fcvtmu Vn.S2, Vm.S2
fcvtmu Vn.D2, Vm.D2
fcvtmu Wn, Hm
fcvtmu Xn, Hm
fcvtmu Wn, Sm
fcvtmu Xn, Sm
fcvtmu Wn, Dm
fcvtmu Xn, Dm

fcvtn

fcvtn Vn.S2, Vm.D2

fcvtn2

fcvtn2 Vn.S4, Vm.D2

fcvtns

fcvtns Hn, Hm
fcvtns Sn, Sm
fcvtns Dn, Dm
fcvtns Vn.H8, Vm.H8
fcvtns Vn.H4, Vm.H4
fcvtns Vn.S4, Vm.S4
fcvtns Vn.S2, Vm.S2
fcvtns Vn.D2, Vm.D2
fcvtns Wn, Hm
fcvtns Xn, Hm
fcvtns Wn, Sm
fcvtns Xn, Sm
fcvtns Wn, Dm
fcvtns Xn, Dm

fcvtnu

fcvtnu Hn, Hm
fcvtnu Sn, Sm
fcvtnu Dn, Dm
fcvtnu Vn.H8, Vm.H8
fcvtnu Vn.H4, Vm.H4
fcvtnu Vn.S4, Vm.S4
fcvtnu Vn.S2, Vm.S2
fcvtnu Vn.D2, Vm.D2
fcvtnu Wn, Hm
fcvtnu Xn, Hm
fcvtnu Wn, Sm
fcvtnu Xn, Sm
fcvtnu Wn, Dm
fcvtnu Xn, Dm

fcvtps

fcvtps Hn, Hm
fcvtps Sn, Sm
fcvtps Dn, Dm
fcvtps Vn.H8, Vm.H8
fcvtps Vn.H4, Vm.H4
fcvtps Vn.S4, Vm.S4
fcvtps Vn.S2, Vm.S2
fcvtps Vn.D2, Vm.D2
fcvtps Wn, Hm
fcvtps Xn, Hm
fcvtps Wn, Sm
fcvtps Xn, Sm
fcvtps Wn, Dm
fcvtps Xn, Dm

fcvtpu

fcvtpu Hn, Hm
fcvtpu Sn, Sm
fcvtpu Dn, Dm
fcvtpu Vn.H8, Vm.H8
fcvtpu Vn.H4, Vm.H4
fcvtpu Vn.S4, Vm.S4
fcvtpu Vn.S2, Vm.S2
fcvtpu Vn.D2, Vm.D2
fcvtpu Wn, Hm
fcvtpu Xn, Hm
fcvtpu Wn, Sm
fcvtpu Xn, Sm
fcvtpu Wn, Dm
fcvtpu Xn, Dm

fcvtxn

fcvtxn Sn, Dm
fcvtxn Vn.S2, Vm.D2

fcvtxn2

fcvtxn2 Vn.S4, Vm.D2

fcvtzs

fcvtzs Hn, Hm, #uimm                                                          (1 <= #uimm <= 16)
fcvtzs Sn, Sm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzs Dn, Dm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzs Vn.H8, Vm.H8, #uimm                                                    (1 <= #uimm <= 16)
fcvtzs Vn.H4, Vm.H4, #uimm                                                    (1 <= #uimm <= 16)
fcvtzs Vn.S4, Vm.S4, #uimm                                                    (1 <= #uimm <= 32)
fcvtzs Vn.S2, Vm.S2, #uimm                                                    (1 <= #uimm <= 32)
fcvtzs Vn.D2, Vm.D2, #uimm                                                    (1 <= #uimm <= 64)
fcvtzs Hn, Hm
fcvtzs Sn, Sm
fcvtzs Dn, Dm
fcvtzs Vn.H8, Vm.H8
fcvtzs Vn.H4, Vm.H4
fcvtzs Vn.S4, Vm.S4
fcvtzs Vn.S2, Vm.S2
fcvtzs Vn.D2, Vm.D2
fcvtzs Wn, Hm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzs Xn, Hm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzs Wn, Sm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzs Xn, Sm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzs Wn, Dm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzs Xn, Dm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzs Wn, Hm
fcvtzs Xn, Hm
fcvtzs Wn, Sm
fcvtzs Xn, Sm
fcvtzs Wn, Dm
fcvtzs Xn, Dm
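
The optional immediate on fcvtzs (and on fcvtzu below) selects a fixed-point result with that many fractional bits; without it the conversion yields a plain integer. Both round toward zero. For example:

    fcvtzs W0, S1                       // W0 = (int32)S1, truncated
    fcvtzs W0, S1, #16                  // W0 = S1 * 2^16, truncated (Q16 fixed point)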

fcvtzu

fcvtzu Hn, Hm, #uimm                                                          (1 <= #uimm <= 16)
fcvtzu Sn, Sm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzu Dn, Dm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzu Vn.H8, Vm.H8, #uimm                                                    (1 <= #uimm <= 16)
fcvtzu Vn.H4, Vm.H4, #uimm                                                    (1 <= #uimm <= 16)
fcvtzu Vn.S4, Vm.S4, #uimm                                                    (1 <= #uimm <= 32)
fcvtzu Vn.S2, Vm.S2, #uimm                                                    (1 <= #uimm <= 32)
fcvtzu Vn.D2, Vm.D2, #uimm                                                    (1 <= #uimm <= 64)
fcvtzu Hn, Hm
fcvtzu Sn, Sm
fcvtzu Dn, Dm
fcvtzu Vn.H8, Vm.H8
fcvtzu Vn.H4, Vm.H4
fcvtzu Vn.S4, Vm.S4
fcvtzu Vn.S2, Vm.S2
fcvtzu Vn.D2, Vm.D2
fcvtzu Wn, Hm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzu Xn, Hm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzu Wn, Sm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzu Xn, Sm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzu Wn, Dm, #uimm                                                          (1 <= #uimm <= 32)
fcvtzu Xn, Dm, #uimm                                                          (1 <= #uimm <= 64)
fcvtzu Wn, Hm
fcvtzu Xn, Hm
fcvtzu Wn, Sm
fcvtzu Xn, Sm
fcvtzu Wn, Dm
fcvtzu Xn, Dm

fdiv

fdiv Vn.H8, Vm.H8, Va.H8
fdiv Vn.H4, Vm.H4, Va.H4
fdiv Vn.S4, Vm.S4, Va.S4
fdiv Vn.S2, Vm.S2, Va.S2
fdiv Vn.D2, Vm.D2, Va.D2
fdiv Hn, Hm, Ha
fdiv Sn, Sm, Sa
fdiv Dn, Dm, Da

fjcvtzs

fjcvtzs Wn, Dm

fmadd

fmadd Hn, Hm, Ha, Hb
fmadd Sn, Sm, Sa, Sb
fmadd Dn, Dm, Da, Db

fmax

fmax Vn.H8, Vm.H8, Va.H8
fmax Vn.H4, Vm.H4, Va.H4
fmax Vn.S4, Vm.S4, Va.S4
fmax Vn.S2, Vm.S2, Va.S2
fmax Vn.D2, Vm.D2, Va.D2
fmax Hn, Hm, Ha
fmax Sn, Sm, Sa
fmax Dn, Dm, Da

fmaxnm

fmaxnm Vn.H8, Vm.H8, Va.H8
fmaxnm Vn.H4, Vm.H4, Va.H4
fmaxnm Vn.S4, Vm.S4, Va.S4
fmaxnm Vn.S2, Vm.S2, Va.S2
fmaxnm Vn.D2, Vm.D2, Va.D2
fmaxnm Hn, Hm, Ha
fmaxnm Sn, Sm, Sa
fmaxnm Dn, Dm, Da

fmaxnmp

fmaxnmp Hn, Vm.H2
fmaxnmp Sn, Vm.S2
fmaxnmp Dn, Vm.D2
fmaxnmp Vn.H8, Vm.H8, Va.H8
fmaxnmp Vn.H4, Vm.H4, Va.H4
fmaxnmp Vn.S4, Vm.S4, Va.S4
fmaxnmp Vn.S2, Vm.S2, Va.S2
fmaxnmp Vn.D2, Vm.D2, Va.D2

fmaxnmv

fmaxnmv Hn, Vm.H8
fmaxnmv Hn, Vm.H4
fmaxnmv Sn, Vm.S4

fmaxp

fmaxp Hn, Vm.H2
fmaxp Sn, Vm.S2
fmaxp Dn, Vm.D2
fmaxp Vn.H8, Vm.H8, Va.H8
fmaxp Vn.H4, Vm.H4, Va.H4
fmaxp Vn.S4, Vm.S4, Va.S4
fmaxp Vn.S2, Vm.S2, Va.S2
fmaxp Vn.D2, Vm.D2, Va.D2

fmaxv

fmaxv Hn, Vm.H8
fmaxv Hn, Vm.H4
fmaxv Sn, Vm.S4

fmin

fmin Vn.H8, Vm.H8, Va.H8
fmin Vn.H4, Vm.H4, Va.H4
fmin Vn.S4, Vm.S4, Va.S4
fmin Vn.S2, Vm.S2, Va.S2
fmin Vn.D2, Vm.D2, Va.D2
fmin Hn, Hm, Ha
fmin Sn, Sm, Sa
fmin Dn, Dm, Da

fminnm

fminnm Vn.H8, Vm.H8, Va.H8
fminnm Vn.H4, Vm.H4, Va.H4
fminnm Vn.S4, Vm.S4, Va.S4
fminnm Vn.S2, Vm.S2, Va.S2
fminnm Vn.D2, Vm.D2, Va.D2
fminnm Hn, Hm, Ha
fminnm Sn, Sm, Sa
fminnm Dn, Dm, Da

fminnmp

fminnmp Hn, Vm.H2
fminnmp Sn, Vm.S2
fminnmp Dn, Vm.D2
fminnmp Vn.H8, Vm.H8, Va.H8
fminnmp Vn.H4, Vm.H4, Va.H4
fminnmp Vn.S4, Vm.S4, Va.S4
fminnmp Vn.S2, Vm.S2, Va.S2
fminnmp Vn.D2, Vm.D2, Va.D2

fminnmv

fminnmv Hn, Vm.H8
fminnmv Hn, Vm.H4
fminnmv Sn, Vm.S4

fminp

fminp Hn, Vm.H2
fminp Sn, Vm.S2
fminp Dn, Vm.D2
fminp Vn.H8, Vm.H8, Va.H8
fminp Vn.H4, Vm.H4, Va.H4
fminp Vn.S4, Vm.S4, Va.S4
fminp Vn.S2, Vm.S2, Va.S2
fminp Vn.D2, Vm.D2, Va.D2

fminv

fminv Hn, Vm.H8
fminv Hn, Vm.H4
fminv Sn, Vm.S4

fmla

fmla Hn, Hm, Va.H[uimm]                                                   (a is 0-15, #uimm < 8)
fmla Sn, Sm, Va.S[uimm]                                                              (#uimm < 4)
fmla Dn, Dm, Va.D[uimm]                                                              (#uimm < 2)
fmla Vn.H8, Vm.H8, Va.H[uimm]                                             (a is 0-15, #uimm < 8)
fmla Vn.H4, Vm.H4, Va.H[uimm]                                             (a is 0-15, #uimm < 8)
fmla Vn.S4, Vm.S4, Va.S[uimm]                                                        (#uimm < 4)
fmla Vn.S2, Vm.S2, Va.S[uimm]                                                        (#uimm < 4)
fmla Vn.D2, Vm.D2, Va.D[uimm]                                                        (#uimm < 2)
fmla Vn.H8, Vm.H8, Va.H8
fmla Vn.H4, Vm.H4, Va.H4
fmla Vn.S4, Vm.S4, Va.S4
fmla Vn.S2, Vm.S2, Va.S2
fmla Vn.D2, Vm.D2, Va.D2

fmlal

fmlal Vn.S2, Vm.H2, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
fmlal Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
fmlal Vn.S2, Vm.H2, Va.H2
fmlal Vn.S4, Vm.H4, Va.H4

fmlal2

fmlal2 Vn.S2, Vm.H2, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
fmlal2 Vn.S4, Vm.H4, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
fmlal2 Vn.S2, Vm.H2, Va.H2
fmlal2 Vn.S4, Vm.H4, Va.H4

fmls

fmls Hn, Hm, Va.H[uimm]                                                   (a is 0-15, #uimm < 8)
fmls Sn, Sm, Va.S[uimm]                                                              (#uimm < 4)
fmls Dn, Dm, Va.D[uimm]                                                              (#uimm < 2)
fmls Vn.H8, Vm.H8, Va.H[uimm]                                             (a is 0-15, #uimm < 8)
fmls Vn.H4, Vm.H4, Va.H[uimm]                                             (a is 0-15, #uimm < 8)
fmls Vn.S4, Vm.S4, Va.S[uimm]                                                        (#uimm < 4)
fmls Vn.S2, Vm.S2, Va.S[uimm]                                                        (#uimm < 4)
fmls Vn.D2, Vm.D2, Va.D[uimm]                                                        (#uimm < 2)
fmls Vn.H8, Vm.H8, Va.H8
fmls Vn.H4, Vm.H4, Va.H4
fmls Vn.S4, Vm.S4, Va.S4
fmls Vn.S2, Vm.S2, Va.S2
fmls Vn.D2, Vm.D2, Va.D2

fmlsl

fmlsl Vn.S2, Vm.H2, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
fmlsl Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
fmlsl Vn.S2, Vm.H2, Va.H2
fmlsl Vn.S4, Vm.H4, Va.H4

fmlsl2

fmlsl2 Vn.S2, Vm.H2, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
fmlsl2 Vn.S4, Vm.H4, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
fmlsl2 Vn.S2, Vm.H2, Va.H2
fmlsl2 Vn.S4, Vm.H4, Va.H4

fmov

fmov Vn.H8, #imm                                            (#imm is a floating point immediate)
fmov Vn.H4, #imm                                            (#imm is a floating point immediate)
fmov Vn.S4, #imm                                            (#imm is a floating point immediate)
fmov Vn.S2, #imm                                            (#imm is a floating point immediate)
fmov Vn.D2, #imm                                            (#imm is a floating point immediate)
fmov Hn, Hm
fmov Sn, Sm
fmov Dn, Dm
fmov Wn, Hm
fmov Xn, Hm
fmov Hn, Wm
fmov Sn, Wm
fmov Wn, Sm
fmov Hn, Xm
fmov Dn, Xm
fmov Vn.D[1], Xm
fmov Xn, Dm
fmov Xn, Vm.D[1]
fmov Hn, #imm                                               (#imm is a floating point immediate)
fmov Sn, #imm                                               (#imm is a floating point immediate)
fmov Dn, #imm                                               (#imm is a floating point immediate)
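
fmov moves raw bits without numeric conversion: between general and floating-point registers, within the floating-point register file, or from a small set of encodable constants. The Vn.D[1] forms access the upper 64 bits of a 128-bit vector. For example:

    fmov D0, X1                         // bitwise move, no conversion
    fmov X0, V2.D[1]                    // read the upper 64 bits of V2
    fmov S0, #1.5                       // only certain 8-bit-encodable constants assemble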

fmsub

fmsub Hn, Hm, Ha, Hb
fmsub Sn, Sm, Sa, Sb
fmsub Dn, Dm, Da, Db

fmul

fmul Hn, Hm, Va.H[uimm]                                                   (a is 0-15, #uimm < 8)
fmul Sn, Sm, Va.S[uimm]                                                              (#uimm < 4)
fmul Dn, Dm, Va.D[uimm]                                                              (#uimm < 2)
fmul Vn.H8, Vm.H8, Va.H[uimm]                                             (a is 0-15, #uimm < 8)
fmul Vn.H4, Vm.H4, Va.H[uimm]                                             (a is 0-15, #uimm < 8)
fmul Vn.S4, Vm.S4, Va.S[uimm]                                                        (#uimm < 4)
fmul Vn.S2, Vm.S2, Va.S[uimm]                                                        (#uimm < 4)
fmul Vn.D2, Vm.D2, Va.D[uimm]                                                        (#uimm < 2)
fmul Vn.H8, Vm.H8, Va.H8
fmul Vn.H4, Vm.H4, Va.H4
fmul Vn.S4, Vm.S4, Va.S4
fmul Vn.S2, Vm.S2, Va.S2
fmul Vn.D2, Vm.D2, Va.D2
fmul Hn, Hm, Ha
fmul Sn, Sm, Sa
fmul Dn, Dm, Da

fmulx

fmulx Hn, Hm, Va.H[uimm]                                                  (a is 0-15, #uimm < 8)
fmulx Sn, Sm, Va.S[uimm]                                                             (#uimm < 4)
fmulx Dn, Dm, Va.D[uimm]                                                             (#uimm < 2)
fmulx Vn.H8, Vm.H8, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
fmulx Vn.H4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
fmulx Vn.S4, Vm.S4, Va.S[uimm]                                                       (#uimm < 4)
fmulx Vn.S2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
fmulx Vn.D2, Vm.D2, Va.D[uimm]                                                       (#uimm < 2)
fmulx Hn, Hm, Ha
fmulx Sn, Sm, Sa
fmulx Dn, Dm, Da
fmulx Vn.H8, Vm.H8, Va.H8
fmulx Vn.H4, Vm.H4, Va.H4
fmulx Vn.S4, Vm.S4, Va.S4
fmulx Vn.S2, Vm.S2, Va.S2
fmulx Vn.D2, Vm.D2, Va.D2

fneg

fneg Vn.H8, Vm.H8
fneg Vn.H4, Vm.H4
fneg Vn.S4, Vm.S4
fneg Vn.S2, Vm.S2
fneg Vn.D2, Vm.D2
fneg Hn, Hm
fneg Sn, Sm
fneg Dn, Dm

fnmadd

fnmadd Hn, Hm, Ha, Hb
fnmadd Sn, Sm, Sa, Sb
fnmadd Dn, Dm, Da, Db

fnmsub

fnmsub Hn, Hm, Ha, Hb
fnmsub Sn, Sm, Sa, Sb
fnmsub Dn, Dm, Da, Db

fnmul

fnmul Hn, Hm, Ha
fnmul Sn, Sm, Sa
fnmul Dn, Dm, Da

frecpe

frecpe Hn, Hm
frecpe Sn, Sm
frecpe Dn, Dm
frecpe Vn.H8, Vm.H8
frecpe Vn.H4, Vm.H4
frecpe Vn.S4, Vm.S4
frecpe Vn.S2, Vm.S2
frecpe Vn.D2, Vm.D2

frecps

frecps Hn, Hm, Ha
frecps Sn, Sm, Sa
frecps Dn, Dm, Da
frecps Vn.H8, Vm.H8, Va.H8
frecps Vn.H4, Vm.H4, Va.H4
frecps Vn.S4, Vm.S4, Va.S4
frecps Vn.S2, Vm.S2, Va.S2
frecps Vn.D2, Vm.D2, Va.D2

frecpx

frecpx Hn, Hm
frecpx Sn, Sm
frecpx Dn, Dm

frinta

frinta Vn.H8, Vm.H8
frinta Vn.H4, Vm.H4
frinta Vn.S4, Vm.S4
frinta Vn.S2, Vm.S2
frinta Vn.D2, Vm.D2
frinta Hn, Hm
frinta Sn, Sm
frinta Dn, Dm

frinti

frinti Vn.H8, Vm.H8
frinti Vn.H4, Vm.H4
frinti Vn.S4, Vm.S4
frinti Vn.S2, Vm.S2
frinti Vn.D2, Vm.D2
frinti Hn, Hm
frinti Sn, Sm
frinti Dn, Dm

frintm

frintm Vn.H8, Vm.H8
frintm Vn.H4, Vm.H4
frintm Vn.S4, Vm.S4
frintm Vn.S2, Vm.S2
frintm Vn.D2, Vm.D2
frintm Hn, Hm
frintm Sn, Sm
frintm Dn, Dm

frintn

frintn Vn.H8, Vm.H8
frintn Vn.H4, Vm.H4
frintn Vn.S4, Vm.S4
frintn Vn.S2, Vm.S2
frintn Vn.D2, Vm.D2
frintn Hn, Hm
frintn Sn, Sm
frintn Dn, Dm

frintp

frintp Vn.H8, Vm.H8
frintp Vn.H4, Vm.H4
frintp Vn.S4, Vm.S4
frintp Vn.S2, Vm.S2
frintp Vn.D2, Vm.D2
frintp Hn, Hm
frintp Sn, Sm
frintp Dn, Dm

frintx

frintx Vn.H8, Vm.H8
frintx Vn.H4, Vm.H4
frintx Vn.S4, Vm.S4
frintx Vn.S2, Vm.S2
frintx Vn.D2, Vm.D2
frintx Hn, Hm
frintx Sn, Sm
frintx Dn, Dm

frintz

frintz Vn.H8, Vm.H8
frintz Vn.H4, Vm.H4
frintz Vn.S4, Vm.S4
frintz Vn.S2, Vm.S2
frintz Vn.D2, Vm.D2
frintz Hn, Hm
frintz Sn, Sm
frintz Dn, Dm

frsqrte

frsqrte Hn, Hm
frsqrte Sn, Sm
frsqrte Dn, Dm
frsqrte Vn.H8, Vm.H8
frsqrte Vn.H4, Vm.H4
frsqrte Vn.S4, Vm.S4
frsqrte Vn.S2, Vm.S2
frsqrte Vn.D2, Vm.D2

frsqrts

frsqrts Hn, Hm, Ha
frsqrts Sn, Sm, Sa
frsqrts Dn, Dm, Da
frsqrts Vn.H8, Vm.H8, Va.H8
frsqrts Vn.H4, Vm.H4, Va.H4
frsqrts Vn.S4, Vm.S4, Va.S4
frsqrts Vn.S2, Vm.S2, Va.S2
frsqrts Vn.D2, Vm.D2, Va.D2

fsqrt

fsqrt Vn.H8, Vm.H8
fsqrt Vn.H4, Vm.H4
fsqrt Vn.S4, Vm.S4
fsqrt Vn.S2, Vm.S2
fsqrt Vn.D2, Vm.D2
fsqrt Hn, Hm
fsqrt Sn, Sm
fsqrt Dn, Dm

fsub

fsub Vn.H8, Vm.H8, Va.H8
fsub Vn.H4, Vm.H4, Va.H4
fsub Vn.S4, Vm.S4, Va.S4
fsub Vn.S2, Vm.S2, Va.S2
fsub Vn.D2, Vm.D2, Va.D2
fsub Hn, Hm, Ha
fsub Sn, Sm, Sa
fsub Dn, Dm, Da

hint

hint #uimm                                                                         (#uimm < 128)

hlt

hlt #uimm                                                                        (#uimm < 65536)

hvc

hvc #uimm                                                                        (#uimm < 65536)

ic

ic ivau, Xn
ic ic_op

ins

ins Vn.B[uimm], Vm.B[uimm1]                                            (#uimm < 16, #uimm1 < 16)
ins Vn.H[uimm], Vm.H[uimm1]                                              (#uimm < 8, #uimm1 < 8)
ins Vn.S[uimm], Vm.S[uimm1]                                              (#uimm < 4, #uimm1 < 4)
ins Vn.D[uimm], Vm.D[uimm1]                                              (#uimm < 2, #uimm1 < 2)
ins Vn.B[uimm], Wm                                                                  (#uimm < 16)
ins Vn.H[uimm], Wm                                                                   (#uimm < 8)
ins Vn.S[uimm], Wm                                                                   (#uimm < 4)
ins Vn.D[uimm], Xm                                                                   (#uimm < 2)

isb

isb sy
isb #uimm                                                                           (#uimm < 16)
isb

ld1

ld1 {Vn.B16 * 1}, [Xm|SP]
ld1 {Vn.B8 * 1}, [Xm|SP]
ld1 {Vn.H8 * 1}, [Xm|SP]
ld1 {Vn.H4 * 1}, [Xm|SP]
ld1 {Vn.S4 * 1}, [Xm|SP]
ld1 {Vn.S2 * 1}, [Xm|SP]
ld1 {Vn.D2 * 1}, [Xm|SP]
ld1 {Vn.D1 * 1}, [Xm|SP]
ld1 {Vn.B16 * 2}, [Xm|SP]
ld1 {Vn.B8 * 2}, [Xm|SP]
ld1 {Vn.H8 * 2}, [Xm|SP]
ld1 {Vn.H4 * 2}, [Xm|SP]
ld1 {Vn.S4 * 2}, [Xm|SP]
ld1 {Vn.S2 * 2}, [Xm|SP]
ld1 {Vn.D2 * 2}, [Xm|SP]
ld1 {Vn.D1 * 2}, [Xm|SP]
ld1 {Vn.B16 * 3}, [Xm|SP]
ld1 {Vn.B8 * 3}, [Xm|SP]
ld1 {Vn.H8 * 3}, [Xm|SP]
ld1 {Vn.H4 * 3}, [Xm|SP]
ld1 {Vn.S4 * 3}, [Xm|SP]
ld1 {Vn.S2 * 3}, [Xm|SP]
ld1 {Vn.D2 * 3}, [Xm|SP]
ld1 {Vn.D1 * 3}, [Xm|SP]
ld1 {Vn.B16 * 4}, [Xm|SP]
ld1 {Vn.B8 * 4}, [Xm|SP]
ld1 {Vn.H8 * 4}, [Xm|SP]
ld1 {Vn.H4 * 4}, [Xm|SP]
ld1 {Vn.S4 * 4}, [Xm|SP]
ld1 {Vn.S2 * 4}, [Xm|SP]
ld1 {Vn.D2 * 4}, [Xm|SP]
ld1 {Vn.D1 * 4}, [Xm|SP]
ld1 {Vn.B8 * 1}, [Xm|SP], 8
ld1 {Vn.H4 * 1}, [Xm|SP], 8
ld1 {Vn.S2 * 1}, [Xm|SP], 8
ld1 {Vn.D1 * 1}, [Xm|SP], 8
ld1 {Vn.B16 * 1}, [Xm|SP], 16
ld1 {Vn.H8 * 1}, [Xm|SP], 16
ld1 {Vn.S4 * 1}, [Xm|SP], 16
ld1 {Vn.D2 * 1}, [Xm|SP], 16
ld1 {Vn.B16 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1 {Vn.B8 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H8 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H4 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S4 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S2 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D2 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D1 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.B8 * 2}, [Xm|SP], 16
ld1 {Vn.H4 * 2}, [Xm|SP], 16
ld1 {Vn.S2 * 2}, [Xm|SP], 16
ld1 {Vn.D1 * 2}, [Xm|SP], 16
ld1 {Vn.B16 * 2}, [Xm|SP], 32
ld1 {Vn.H8 * 2}, [Xm|SP], 32
ld1 {Vn.S4 * 2}, [Xm|SP], 32
ld1 {Vn.D2 * 2}, [Xm|SP], 32
ld1 {Vn.B16 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld1 {Vn.B8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D1 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.B8 * 3}, [Xm|SP], 24
ld1 {Vn.H4 * 3}, [Xm|SP], 24
ld1 {Vn.S2 * 3}, [Xm|SP], 24
ld1 {Vn.D1 * 3}, [Xm|SP], 24
ld1 {Vn.B16 * 3}, [Xm|SP], 48
ld1 {Vn.H8 * 3}, [Xm|SP], 48
ld1 {Vn.S4 * 3}, [Xm|SP], 48
ld1 {Vn.D2 * 3}, [Xm|SP], 48
ld1 {Vn.B16 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld1 {Vn.B8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D1 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.B8 * 4}, [Xm|SP], 32
ld1 {Vn.H4 * 4}, [Xm|SP], 32
ld1 {Vn.S2 * 4}, [Xm|SP], 32
ld1 {Vn.D1 * 4}, [Xm|SP], 32
ld1 {Vn.B16 * 4}, [Xm|SP], 64
ld1 {Vn.H8 * 4}, [Xm|SP], 64
ld1 {Vn.S4 * 4}, [Xm|SP], 64
ld1 {Vn.D2 * 4}, [Xm|SP], 64
ld1 {Vn.B16 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld1 {Vn.B8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.H4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.S2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.D1 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld1 {Vn.B * 1}[uimm], [Xm|SP]                                                       (#uimm < 16)
ld1 {Vn.H * 1}[uimm], [Xm|SP]                                                        (#uimm < 8)
ld1 {Vn.S * 1}[uimm], [Xm|SP]                                                        (#uimm < 4)
ld1 {Vn.D * 1}[uimm], [Xm|SP]                                                        (#uimm < 2)
ld1 {Vn.B * 1}[uimm], [Xm|SP], 1                                                    (#uimm < 16)
ld1 {Vn.B * 1}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
ld1 {Vn.H * 1}[uimm], [Xm|SP], 2                                                     (#uimm < 8)
ld1 {Vn.H * 1}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
ld1 {Vn.S * 1}[uimm], [Xm|SP], 4                                                     (#uimm < 4)
ld1 {Vn.S * 1}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
ld1 {Vn.D * 1}[uimm], [Xm|SP], 8                                                     (#uimm < 2)
ld1 {Vn.D * 1}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)
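
In the list notation above, {Vn.B16 * 2} reads as a list of consecutive registers starting at Vn — here Vn and Vn+1, wrapping after V31 — matching the consecutive register lists of standard A64 syntax, and a trailing immediate or Xa is a post-index that advances the base register after the access. For example, keeping this reference's notation:

    ld1 {V0.B16 * 2}, [X0], 32          // load 32 bytes into V0 and V1; X0 += 32
    ld1 {V2.S * 1}[1], [X1], X3         // load one 32-bit lane of V2; X1 += X3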

ld1r

ld1r {Vn.B16 * 1}, [Xm|SP]
ld1r {Vn.B8 * 1}, [Xm|SP]
ld1r {Vn.H8 * 1}, [Xm|SP]
ld1r {Vn.H4 * 1}, [Xm|SP]
ld1r {Vn.S4 * 1}, [Xm|SP]
ld1r {Vn.S2 * 1}, [Xm|SP]
ld1r {Vn.D2 * 1}, [Xm|SP]
ld1r {Vn.D1 * 1}, [Xm|SP]
ld1r {Vn.B16 * 1}, [Xm|SP], 1
ld1r {Vn.B8 * 1}, [Xm|SP], 1
ld1r {Vn.H8 * 1}, [Xm|SP], 2
ld1r {Vn.H4 * 1}, [Xm|SP], 2
ld1r {Vn.S4 * 1}, [Xm|SP], 4
ld1r {Vn.S2 * 1}, [Xm|SP], 4
ld1r {Vn.D2 * 1}, [Xm|SP], 8
ld1r {Vn.D1 * 1}, [Xm|SP], 8
ld1r {Vn.B16 * 1}, [Xm|SP], Xa                                                       (a is 0-30)
ld1r {Vn.B8 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1r {Vn.H8 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1r {Vn.H4 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1r {Vn.S4 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1r {Vn.S2 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1r {Vn.D2 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
ld1r {Vn.D1 * 1}, [Xm|SP], Xa                                                        (a is 0-30)

ld2

ld2 {Vn.B16 * 2}, [Xm|SP]
ld2 {Vn.B8 * 2}, [Xm|SP]
ld2 {Vn.H8 * 2}, [Xm|SP]
ld2 {Vn.H4 * 2}, [Xm|SP]
ld2 {Vn.S4 * 2}, [Xm|SP]
ld2 {Vn.S2 * 2}, [Xm|SP]
ld2 {Vn.D2 * 2}, [Xm|SP]
ld2 {Vn.B8 * 2}, [Xm|SP], 16
ld2 {Vn.H4 * 2}, [Xm|SP], 16
ld2 {Vn.S2 * 2}, [Xm|SP], 16
ld2 {Vn.B16 * 2}, [Xm|SP], 32
ld2 {Vn.H8 * 2}, [Xm|SP], 32
ld2 {Vn.S4 * 2}, [Xm|SP], 32
ld2 {Vn.D2 * 2}, [Xm|SP], 32
ld2 {Vn.B16 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2 {Vn.B8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld2 {Vn.H8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld2 {Vn.H4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld2 {Vn.S4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld2 {Vn.S2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld2 {Vn.D2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
ld2 {Vn.B * 2}[uimm], [Xm|SP]                                                       (#uimm < 16)
ld2 {Vn.H * 2}[uimm], [Xm|SP]                                                        (#uimm < 8)
ld2 {Vn.S * 2}[uimm], [Xm|SP]                                                        (#uimm < 4)
ld2 {Vn.D * 2}[uimm], [Xm|SP]                                                        (#uimm < 2)
ld2 {Vn.B * 2}[uimm], [Xm|SP], 2                                                    (#uimm < 16)
ld2 {Vn.B * 2}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
ld2 {Vn.H * 2}[uimm], [Xm|SP], 4                                                     (#uimm < 8)
ld2 {Vn.H * 2}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
ld2 {Vn.S * 2}[uimm], [Xm|SP], 8                                                     (#uimm < 4)
ld2 {Vn.S * 2}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
ld2 {Vn.D * 2}[uimm], [Xm|SP], 16                                                    (#uimm < 2)
ld2 {Vn.D * 2}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)

ld2r

ld2r {Vn.B16 * 2}, [Xm|SP]
ld2r {Vn.B8 * 2}, [Xm|SP]
ld2r {Vn.H8 * 2}, [Xm|SP]
ld2r {Vn.H4 * 2}, [Xm|SP]
ld2r {Vn.S4 * 2}, [Xm|SP]
ld2r {Vn.S2 * 2}, [Xm|SP]
ld2r {Vn.D2 * 2}, [Xm|SP]
ld2r {Vn.D1 * 2}, [Xm|SP]
ld2r {Vn.B16 * 2}, [Xm|SP], 2
ld2r {Vn.B8 * 2}, [Xm|SP], 2
ld2r {Vn.H8 * 2}, [Xm|SP], 4
ld2r {Vn.H4 * 2}, [Xm|SP], 4
ld2r {Vn.S4 * 2}, [Xm|SP], 8
ld2r {Vn.S2 * 2}, [Xm|SP], 8
ld2r {Vn.D2 * 2}, [Xm|SP], 16
ld2r {Vn.D1 * 2}, [Xm|SP], 16
ld2r {Vn.B16 * 2}, [Xm|SP], Xa                                                       (a is 0-30)
ld2r {Vn.B8 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2r {Vn.H8 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2r {Vn.H4 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2r {Vn.S4 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2r {Vn.S2 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2r {Vn.D2 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
ld2r {Vn.D1 * 2}, [Xm|SP], Xa                                                        (a is 0-30)

ld3

ld3 {Vn.B16 * 3}, [Xm|SP]
ld3 {Vn.B8 * 3}, [Xm|SP]
ld3 {Vn.H8 * 3}, [Xm|SP]
ld3 {Vn.H4 * 3}, [Xm|SP]
ld3 {Vn.S4 * 3}, [Xm|SP]
ld3 {Vn.S2 * 3}, [Xm|SP]
ld3 {Vn.D2 * 3}, [Xm|SP]
ld3 {Vn.B8 * 3}, [Xm|SP], 24
ld3 {Vn.H4 * 3}, [Xm|SP], 24
ld3 {Vn.S2 * 3}, [Xm|SP], 24
ld3 {Vn.B16 * 3}, [Xm|SP], 48
ld3 {Vn.H8 * 3}, [Xm|SP], 48
ld3 {Vn.S4 * 3}, [Xm|SP], 48
ld3 {Vn.D2 * 3}, [Xm|SP], 48
ld3 {Vn.B16 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3 {Vn.B8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld3 {Vn.H8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld3 {Vn.H4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld3 {Vn.S4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld3 {Vn.S2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld3 {Vn.D2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
ld3 {Vn.B * 3}[uimm], [Xm|SP]                                                       (#uimm < 16)
ld3 {Vn.H * 3}[uimm], [Xm|SP]                                                        (#uimm < 8)
ld3 {Vn.S * 3}[uimm], [Xm|SP]                                                        (#uimm < 4)
ld3 {Vn.D * 3}[uimm], [Xm|SP]                                                        (#uimm < 2)
ld3 {Vn.B * 3}[uimm], [Xm|SP], 3                                                    (#uimm < 16)
ld3 {Vn.B * 3}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
ld3 {Vn.H * 3}[uimm], [Xm|SP], 6                                                     (#uimm < 8)
ld3 {Vn.H * 3}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
ld3 {Vn.S * 3}[uimm], [Xm|SP], 12                                                    (#uimm < 4)
ld3 {Vn.S * 3}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
ld3 {Vn.D * 3}[uimm], [Xm|SP], 24                                                    (#uimm < 2)
ld3 {Vn.D * 3}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)

ld3r

ld3r {Vn.B16 * 3}, [Xm|SP]
ld3r {Vn.B8 * 3}, [Xm|SP]
ld3r {Vn.H8 * 3}, [Xm|SP]
ld3r {Vn.H4 * 3}, [Xm|SP]
ld3r {Vn.S4 * 3}, [Xm|SP]
ld3r {Vn.S2 * 3}, [Xm|SP]
ld3r {Vn.D2 * 3}, [Xm|SP]
ld3r {Vn.D1 * 3}, [Xm|SP]
ld3r {Vn.B16 * 3}, [Xm|SP], 3
ld3r {Vn.B8 * 3}, [Xm|SP], 3
ld3r {Vn.H8 * 3}, [Xm|SP], 6
ld3r {Vn.H4 * 3}, [Xm|SP], 6
ld3r {Vn.S4 * 3}, [Xm|SP], 12
ld3r {Vn.S2 * 3}, [Xm|SP], 12
ld3r {Vn.D2 * 3}, [Xm|SP], 24
ld3r {Vn.D1 * 3}, [Xm|SP], 24
ld3r {Vn.B16 * 3}, [Xm|SP], Xa                                                       (a is 0-30)
ld3r {Vn.B8 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3r {Vn.H8 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3r {Vn.H4 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3r {Vn.S4 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3r {Vn.S2 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3r {Vn.D2 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
ld3r {Vn.D1 * 3}, [Xm|SP], Xa                                                        (a is 0-30)

ld4

ld4 {Vn.B16 * 4}, [Xm|SP]
ld4 {Vn.B8 * 4}, [Xm|SP]
ld4 {Vn.H8 * 4}, [Xm|SP]
ld4 {Vn.H4 * 4}, [Xm|SP]
ld4 {Vn.S4 * 4}, [Xm|SP]
ld4 {Vn.S2 * 4}, [Xm|SP]
ld4 {Vn.D2 * 4}, [Xm|SP]
ld4 {Vn.B8 * 4}, [Xm|SP], 32
ld4 {Vn.H4 * 4}, [Xm|SP], 32
ld4 {Vn.S2 * 4}, [Xm|SP], 32
ld4 {Vn.B16 * 4}, [Xm|SP], 64
ld4 {Vn.H8 * 4}, [Xm|SP], 64
ld4 {Vn.S4 * 4}, [Xm|SP], 64
ld4 {Vn.D2 * 4}, [Xm|SP], 64
ld4 {Vn.B16 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4 {Vn.B8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld4 {Vn.H8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld4 {Vn.H4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld4 {Vn.S4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld4 {Vn.S2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld4 {Vn.D2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
ld4 {Vn.B * 4}[uimm], [Xm|SP]                                                       (#uimm < 16)
ld4 {Vn.H * 4}[uimm], [Xm|SP]                                                        (#uimm < 8)
ld4 {Vn.S * 4}[uimm], [Xm|SP]                                                        (#uimm < 4)
ld4 {Vn.D * 4}[uimm], [Xm|SP]                                                        (#uimm < 2)
ld4 {Vn.B * 4}[uimm], [Xm|SP], 4                                                    (#uimm < 16)
ld4 {Vn.B * 4}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
ld4 {Vn.H * 4}[uimm], [Xm|SP], 8                                                     (#uimm < 8)
ld4 {Vn.H * 4}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
ld4 {Vn.S * 4}[uimm], [Xm|SP], 16                                                    (#uimm < 4)
ld4 {Vn.S * 4}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
ld4 {Vn.D * 4}[uimm], [Xm|SP], 32                                                    (#uimm < 2)
ld4 {Vn.D * 4}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)

ld4r

ld4r {Vn.B16 * 4}, [Xm|SP]
ld4r {Vn.B8 * 4}, [Xm|SP]
ld4r {Vn.H8 * 4}, [Xm|SP]
ld4r {Vn.H4 * 4}, [Xm|SP]
ld4r {Vn.S4 * 4}, [Xm|SP]
ld4r {Vn.S2 * 4}, [Xm|SP]
ld4r {Vn.D2 * 4}, [Xm|SP]
ld4r {Vn.D1 * 4}, [Xm|SP]
ld4r {Vn.B16 * 4}, [Xm|SP], 4
ld4r {Vn.B8 * 4}, [Xm|SP], 4
ld4r {Vn.H8 * 4}, [Xm|SP], 8
ld4r {Vn.H4 * 4}, [Xm|SP], 8
ld4r {Vn.S4 * 4}, [Xm|SP], 16
ld4r {Vn.S2 * 4}, [Xm|SP], 16
ld4r {Vn.D2 * 4}, [Xm|SP], 32
ld4r {Vn.D1 * 4}, [Xm|SP], 32
ld4r {Vn.B16 * 4}, [Xm|SP], Xa                                                       (a is 0-30)
ld4r {Vn.B8 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4r {Vn.H8 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4r {Vn.H4 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4r {Vn.S4 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4r {Vn.S2 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4r {Vn.D2 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
ld4r {Vn.D1 * 4}, [Xm|SP], Xa                                                        (a is 0-30)

ldadd

ldadd Wn, Wm, [Xa|SP]
ldadd Xn, Xm, [Xa|SP]

ldadda

ldadda Wn, Wm, [Xa|SP]
ldadda Xn, Xm, [Xa|SP]

ldaddab

ldaddab Wn, Wm, [Xa|SP]

ldaddah

ldaddah Wn, Wm, [Xa|SP]

ldaddal

ldaddal Wn, Wm, [Xa|SP]
ldaddal Xn, Xm, [Xa|SP]

ldaddalb

ldaddalb Wn, Wm, [Xa|SP]

ldaddalh

ldaddalh Wn, Wm, [Xa|SP]

ldaddb

ldaddb Wn, Wm, [Xa|SP]

ldaddh

ldaddh Wn, Wm, [Xa|SP]

ldaddl

ldaddl Wn, Wm, [Xa|SP]
ldaddl Xn, Xm, [Xa|SP]

ldaddlb

ldaddlb Wn, Wm, [Xa|SP]

ldaddlh

ldaddlh Wn, Wm, [Xa|SP]

ldapr

ldapr Wn, [Xm|SP]
ldapr Xn, [Xm|SP]

ldaprb

ldaprb Wn, [Xm|SP]

ldaprh

ldaprh Wn, [Xm|SP]

ldapur

ldapur Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)
ldapur Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldapurb

ldapurb Wn, [Xm|SP {, #simm } ]                                            (-256 <= #simm < 256)

ldapurh

ldapurh Wn, [Xm|SP {, #simm } ]                                            (-256 <= #simm < 256)

ldapursb

ldapursb Wn, [Xm|SP {, #simm } ]                                           (-256 <= #simm < 256)
ldapursb Xn, [Xm|SP {, #simm } ]                                           (-256 <= #simm < 256)

ldapursh

ldapursh Wn, [Xm|SP {, #simm } ]                                           (-256 <= #simm < 256)
ldapursh Xn, [Xm|SP {, #simm } ]                                           (-256 <= #simm < 256)

ldapursw

ldapursw Xn, [Xm|SP {, #simm } ]                                           (-256 <= #simm < 256)

ldar

ldar Wn, [Xm|SP]
ldar Xn, [Xm|SP]
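
Illustrative use: a load-acquire orders all later memory accesses after it; its
release-store counterpart (stlr) is listed elsewhere in this reference.

ldar w0, [x1]                                // w0 = [x1], with acquire ordering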

ldarb

ldarb Wn, [Xm|SP]

ldarh

ldarh Wn, [Xm|SP]

ldaxp

ldaxp Wn, Wm, [Xa|SP]
ldaxp Xn, Xm, [Xa|SP]

ldaxr

ldaxr Wn, [Xm|SP]
ldaxr Xn, [Xm|SP]
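
A minimal retry-loop sketch in generic GNU-style syntax, assuming the stxr
store-exclusive form listed elsewhere in this reference:

1:  ldaxr w1, [x0]                           // load-acquire exclusive
    add   w1, w1, #1
    stxr  w2, w1, [x0]                       // w2 = 0 if the exclusive store succeeded
    cbnz  w2, 1b                             // otherwise retry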

ldaxrb

ldaxrb Wn, [Xm|SP]

ldaxrh

ldaxrh Wn, [Xm|SP]

ldclr

ldclr Wn, Wm, [Xa|SP]
ldclr Xn, Xm, [Xa|SP]

ldclra

ldclra Wn, Wm, [Xa|SP]
ldclra Xn, Xm, [Xa|SP]

ldclrab

ldclrab Wn, Wm, [Xa|SP]

ldclrah

ldclrah Wn, Wm, [Xa|SP]

ldclral

ldclral Wn, Wm, [Xa|SP]
ldclral Xn, Xm, [Xa|SP]

ldclralb

ldclralb Wn, Wm, [Xa|SP]

ldclralh

ldclralh Wn, Wm, [Xa|SP]

ldclrb

ldclrb Wn, Wm, [Xa|SP]

ldclrh

ldclrh Wn, Wm, [Xa|SP]

ldclrl

ldclrl Wn, Wm, [Xa|SP]
ldclrl Xn, Xm, [Xa|SP]

ldclrlb

ldclrlb Wn, Wm, [Xa|SP]

ldclrlh

ldclrlh Wn, Wm, [Xa|SP]

ldeor

ldeor Wn, Wm, [Xa|SP]
ldeor Xn, Xm, [Xa|SP]

ldeora

ldeora Wn, Wm, [Xa|SP]
ldeora Xn, Xm, [Xa|SP]

ldeorab

ldeorab Wn, Wm, [Xa|SP]

ldeorah

ldeorah Wn, Wm, [Xa|SP]

ldeoral

ldeoral Wn, Wm, [Xa|SP]
ldeoral Xn, Xm, [Xa|SP]

ldeoralb

ldeoralb Wn, Wm, [Xa|SP]

ldeoralh

ldeoralh Wn, Wm, [Xa|SP]

ldeorb

ldeorb Wn, Wm, [Xa|SP]

ldeorh

ldeorh Wn, Wm, [Xa|SP]

ldeorl

ldeorl Wn, Wm, [Xa|SP]
ldeorl Xn, Xm, [Xa|SP]

ldeorlb

ldeorlb Wn, Wm, [Xa|SP]

ldeorlh

ldeorlh Wn, Wm, [Xa|SP]

ldlar

ldlar Wn, [Xm|SP]
ldlar Xn, [Xm|SP]

ldlarb

ldlarb Wn, [Xm|SP]

ldlarh

ldlarh Wn, [Xm|SP]

ldnp

ldnp Sn, Sm, [Xa|SP {, #simm } ]                            (-256 <= #simm < 256, #simm = 4 * N)
ldnp Dn, Dm, [Xa|SP {, #simm } ]                            (-512 <= #simm < 512, #simm = 8 * N)
ldnp Qn, Qm, [Xa|SP {, #simm } ]                         (-1024 <= #simm < 1024, #simm = 16 * N)
ldnp Wn, Wm, [Xa|SP {, #simm } ]                            (-256 <= #simm < 256, #simm = 4 * N)
ldnp Xn, Xm, [Xa|SP {, #simm } ]                            (-512 <= #simm < 512, #simm = 8 * N)

ldp

ldp Sn, Sm, [Xa|SP], #simm                                  (-256 <= #simm < 256, #simm = 4 * N)
ldp Dn, Dm, [Xa|SP], #simm                                  (-512 <= #simm < 512, #simm = 8 * N)
ldp Qn, Qm, [Xa|SP], #simm                               (-1024 <= #simm < 1024, #simm = 16 * N)
ldp Sn, Sm, [Xa|SP, #simm]!                                 (-256 <= #simm < 256, #simm = 4 * N)
ldp Dn, Dm, [Xa|SP, #simm]!                                 (-512 <= #simm < 512, #simm = 8 * N)
ldp Qn, Qm, [Xa|SP, #simm]!                              (-1024 <= #simm < 1024, #simm = 16 * N)
ldp Sn, Sm, [Xa|SP {, #simm } ]                             (-256 <= #simm < 256, #simm = 4 * N)
ldp Dn, Dm, [Xa|SP {, #simm } ]                             (-512 <= #simm < 512, #simm = 8 * N)
ldp Qn, Qm, [Xa|SP {, #simm } ]                          (-1024 <= #simm < 1024, #simm = 16 * N)
ldp Wn, Wm, [Xa|SP], #simm                                  (-256 <= #simm < 256, #simm = 4 * N)
ldp Xn, Xm, [Xa|SP], #simm                                  (-512 <= #simm < 512, #simm = 8 * N)
ldp Wn, Wm, [Xa|SP, #simm]!                                 (-256 <= #simm < 256, #simm = 4 * N)
ldp Xn, Xm, [Xa|SP, #simm]!                                 (-512 <= #simm < 512, #simm = 8 * N)
ldp Wn, Wm, [Xa|SP {, #simm } ]                             (-256 <= #simm < 256, #simm = 4 * N)
ldp Xn, Xm, [Xa|SP {, #simm } ]                             (-512 <= #simm < 512, #simm = 8 * N)
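
Illustrative uses, e.g. in a function epilogue:

ldp x29, x30, [sp], #16                      // restore FP and LR, then pop 16 bytes
ldp q0, q1, [sp, #32]                        // pair of 128-bit loads at byte offset 32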

ldpsw

ldpsw Xn, Xm, [Xa|SP], #simm                                (-256 <= #simm < 256, #simm = 4 * N)
ldpsw Xn, Xm, [Xa|SP, #simm]!                               (-256 <= #simm < 256, #simm = 4 * N)
ldpsw Xn, Xm, [Xa|SP {, #simm } ]                           (-256 <= #simm < 256, #simm = 4 * N)

ldr

ldr Bn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Hn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Sn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Dn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Qn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Bn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Hn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Sn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Dn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Qn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Bn, [Xm|SP {, #uimm } ]                                                       (#uimm < 4096)
ldr Hn, [Xm|SP {, #uimm } ]                                        (#uimm < 8192, #uimm = 2 * N)
ldr Sn, [Xm|SP {, #uimm } ]                                       (#uimm < 16384, #uimm = 4 * N)
ldr Dn, [Xm|SP {, #uimm } ]                                       (#uimm < 32768, #uimm = 8 * N)
ldr Qn, [Xm|SP {, #uimm } ]                                      (#uimm < 65536, #uimm = 16 * N)
ldr Wn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Xn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
ldr Wn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Xn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
ldr Wn, [Xm|SP {, #uimm } ]                                       (#uimm < 16384, #uimm = 4 * N)
ldr Xn, [Xm|SP {, #uimm } ]                                       (#uimm < 32768, #uimm = 8 * N)
ldr Sn, <offset>                                              (offset is 19 bit, 4-byte aligned)
ldr Dn, <offset>                                              (offset is 19 bit, 4-byte aligned)
ldr Qn, <offset>                                              (offset is 19 bit, 4-byte aligned)
ldr Wn, <offset>                                              (offset is 19 bit, 4-byte aligned)
ldr Xn, <offset>                                              (offset is 19 bit, 4-byte aligned)
ldr Bn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                           (#uimm = 0)
ldr Hn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 1])
ldr Sn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 2])
ldr Dn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 3])
ldr Qn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 4])
ldr Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 2])
ldr Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 3])
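
Illustrative addressing modes:

ldr w0, [x1, x2, LSL #2]                     // w0 = word at x1 + (x2 << 2)
ldr x3, [x1, w4, SXTW #3]                    // index: sign-extend w4, scale by 8
ldr x5, [x6], #8                             // post-index: load, then x6 += 8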

ldraa

ldraa Xn, [Xm|SP {, #simm } ]                             (-4096 <= #simm < 4096, #simm = 8 * N)
ldraa Xn, [Xm|SP, #simm]!                                 (-4096 <= #simm < 4096, #simm = 8 * N)

ldrab

ldrab Xn, [Xm|SP {, #simm } ]                             (-4096 <= #simm < 4096, #simm = 8 * N)
ldrab Xn, [Xm|SP, #simm]!                                 (-4096 <= #simm < 4096, #simm = 8 * N)

ldrb

ldrb Wn, [Xm|SP], #simm                                                    (-256 <= #simm < 256)
ldrb Wn, [Xm|SP, #simm]!                                                   (-256 <= #simm < 256)
ldrb Wn, [Xm|SP {, #uimm } ]                                                      (#uimm < 4096)
ldrb Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                          (#uimm = 0)

ldrh

ldrh Wn, [Xm|SP], #simm                                                    (-256 <= #simm < 256)
ldrh Wn, [Xm|SP, #simm]!                                                   (-256 <= #simm < 256)
ldrh Wn, [Xm|SP {, #uimm } ]                                       (#uimm < 8192, #uimm = 2 * N)
ldrh Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                     (#uimm = [0, 1])

ldrsb

ldrsb Wn, [Xm|SP], #simm                                                   (-256 <= #simm < 256)
ldrsb Xn, [Xm|SP], #simm                                                   (-256 <= #simm < 256)
ldrsb Wn, [Xm|SP, #simm]!                                                  (-256 <= #simm < 256)
ldrsb Xn, [Xm|SP, #simm]!                                                  (-256 <= #simm < 256)
ldrsb Wn, [Xm|SP {, #uimm } ]                                                     (#uimm < 4096)
ldrsb Xn, [Xm|SP {, #uimm } ]                                                     (#uimm < 4096)
ldrsb Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                         (#uimm = 0)
ldrsb Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                         (#uimm = 0)

ldrsh

ldrsh Wn, [Xm|SP], #simm                                                   (-256 <= #simm < 256)
ldrsh Xn, [Xm|SP], #simm                                                   (-256 <= #simm < 256)
ldrsh Wn, [Xm|SP, #simm]!                                                  (-256 <= #simm < 256)
ldrsh Xn, [Xm|SP, #simm]!                                                  (-256 <= #simm < 256)
ldrsh Wn, [Xm|SP {, #uimm } ]                                      (#uimm < 8192, #uimm = 2 * N)
ldrsh Xn, [Xm|SP {, #uimm } ]                                      (#uimm < 8192, #uimm = 2 * N)
ldrsh Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                    (#uimm = [0, 1])
ldrsh Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                    (#uimm = [0, 1])

ldrsw

ldrsw Xn, [Xm|SP], #simm                                                   (-256 <= #simm < 256)
ldrsw Xn, [Xm|SP, #simm]!                                                  (-256 <= #simm < 256)
ldrsw Xn, [Xm|SP {, #uimm } ]                                     (#uimm < 16384, #uimm = 4 * N)
ldrsw Xn, <offset>                                            (offset is 19 bit, 4-byte aligned)
ldrsw Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                    (#uimm = [0, 2])

ldset

ldset Wn, Wm, [Xa|SP]
ldset Xn, Xm, [Xa|SP]

ldseta

ldseta Wn, Wm, [Xa|SP]
ldseta Xn, Xm, [Xa|SP]

ldsetab

ldsetab Wn, Wm, [Xa|SP]

ldsetah

ldsetah Wn, Wm, [Xa|SP]

ldsetal

ldsetal Wn, Wm, [Xa|SP]
ldsetal Xn, Xm, [Xa|SP]

ldsetalb

ldsetalb Wn, Wm, [Xa|SP]

ldsetalh

ldsetalh Wn, Wm, [Xa|SP]

ldsetb

ldsetb Wn, Wm, [Xa|SP]

ldseth

ldseth Wn, Wm, [Xa|SP]

ldsetl

ldsetl Wn, Wm, [Xa|SP]
ldsetl Xn, Xm, [Xa|SP]

ldsetlb

ldsetlb Wn, Wm, [Xa|SP]

ldsetlh

ldsetlh Wn, Wm, [Xa|SP]

ldsmax

ldsmax Wn, Wm, [Xa|SP]
ldsmax Xn, Xm, [Xa|SP]

ldsmaxa

ldsmaxa Wn, Wm, [Xa|SP]
ldsmaxa Xn, Xm, [Xa|SP]

ldsmaxab

ldsmaxab Wn, Wm, [Xa|SP]

ldsmaxah

ldsmaxah Wn, Wm, [Xa|SP]

ldsmaxal

ldsmaxal Wn, Wm, [Xa|SP]
ldsmaxal Xn, Xm, [Xa|SP]

ldsmaxalb

ldsmaxalb Wn, Wm, [Xa|SP]

ldsmaxalh

ldsmaxalh Wn, Wm, [Xa|SP]

ldsmaxb

ldsmaxb Wn, Wm, [Xa|SP]

ldsmaxh

ldsmaxh Wn, Wm, [Xa|SP]

ldsmaxl

ldsmaxl Wn, Wm, [Xa|SP]
ldsmaxl Xn, Xm, [Xa|SP]

ldsmaxlb

ldsmaxlb Wn, Wm, [Xa|SP]

ldsmaxlh

ldsmaxlh Wn, Wm, [Xa|SP]

ldsmin

ldsmin Wn, Wm, [Xa|SP]
ldsmin Xn, Xm, [Xa|SP]

ldsmina

ldsmina Wn, Wm, [Xa|SP]
ldsmina Xn, Xm, [Xa|SP]

ldsminab

ldsminab Wn, Wm, [Xa|SP]

ldsminah

ldsminah Wn, Wm, [Xa|SP]

ldsminal

ldsminal Wn, Wm, [Xa|SP]
ldsminal Xn, Xm, [Xa|SP]

ldsminalb

ldsminalb Wn, Wm, [Xa|SP]

ldsminalh

ldsminalh Wn, Wm, [Xa|SP]

ldsminb

ldsminb Wn, Wm, [Xa|SP]

ldsminh

ldsminh Wn, Wm, [Xa|SP]

ldsminl

ldsminl Wn, Wm, [Xa|SP]
ldsminl Xn, Xm, [Xa|SP]

ldsminlb

ldsminlb Wn, Wm, [Xa|SP]

ldsminlh

ldsminlh Wn, Wm, [Xa|SP]

ldtr

ldtr Wn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldtr Xn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)

ldtrb

ldtrb Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

ldtrh

ldtrh Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

ldtrsb

ldtrsb Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)
ldtrsb Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldtrsh

ldtrsh Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)
ldtrsh Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldtrsw

ldtrsw Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldumax

ldumax Wn, Wm, [Xa|SP]
ldumax Xn, Xm, [Xa|SP]

ldumaxa

ldumaxa Wn, Wm, [Xa|SP]
ldumaxa Xn, Xm, [Xa|SP]

ldumaxab

ldumaxab Wn, Wm, [Xa|SP]

ldumaxah

ldumaxah Wn, Wm, [Xa|SP]

ldumaxal

ldumaxal Wn, Wm, [Xa|SP]
ldumaxal Xn, Xm, [Xa|SP]

ldumaxalb

ldumaxalb Wn, Wm, [Xa|SP]

ldumaxalh

ldumaxalh Wn, Wm, [Xa|SP]

ldumaxb

ldumaxb Wn, Wm, [Xa|SP]

ldumaxh

ldumaxh Wn, Wm, [Xa|SP]

ldumaxl

ldumaxl Wn, Wm, [Xa|SP]
ldumaxl Xn, Xm, [Xa|SP]

ldumaxlb

ldumaxlb Wn, Wm, [Xa|SP]

ldumaxlh

ldumaxlh Wn, Wm, [Xa|SP]

ldumin

ldumin Wn, Wm, [Xa|SP]
ldumin Xn, Xm, [Xa|SP]

ldumina

ldumina Wn, Wm, [Xa|SP]
ldumina Xn, Xm, [Xa|SP]

lduminab

lduminab Wn, Wm, [Xa|SP]

lduminah

lduminah Wn, Wm, [Xa|SP]

lduminal

lduminal Wn, Wm, [Xa|SP]
lduminal Xn, Xm, [Xa|SP]

lduminalb

lduminalb Wn, Wm, [Xa|SP]

lduminalh

lduminalh Wn, Wm, [Xa|SP]

lduminb

lduminb Wn, Wm, [Xa|SP]

lduminh

lduminh Wn, Wm, [Xa|SP]

lduminl

lduminl Wn, Wm, [Xa|SP]
lduminl Xn, Xm, [Xa|SP]

lduminlb

lduminlb Wn, Wm, [Xa|SP]

lduminlh

lduminlh Wn, Wm, [Xa|SP]

ldur

ldur Bn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldur Hn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldur Sn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldur Dn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldur Qn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldur Wn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
ldur Xn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)

ldurb

ldurb Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

ldurh

ldurh Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

ldursb

ldursb Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)
ldursb Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldursh

ldursh Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)
ldursh Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldursw

ldursw Xn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

ldxp

ldxp Wn, Wm, [Xa|SP]
ldxp Xn, Xm, [Xa|SP]

ldxr

ldxr Wn, [Xm|SP]
ldxr Xn, [Xm|SP]

ldxrb

ldxrb Wn, [Xm|SP]

ldxrh

ldxrh Wn, [Xm|SP]

lsl

lsl Wn, Wm, Wa
lsl Xn, Xm, Xa
lsl Wn, Wm, #uimm                                                              (0 <= #uimm < 32)
lsl Xn, Xm, #uimm                                                              (0 <= #uimm < 64)

lslv

lslv Wn, Wm, Wa
lslv Xn, Xm, Xa

lsr

lsr Wn, Wm, Wa
lsr Xn, Xm, Xa
lsr Wn, Wm, #uimm                                                                   (#uimm < 32)
lsr Xn, Xm, #uimm                                                                   (#uimm < 64)

lsrv

lsrv Wn, Wm, Wa
lsrv Xn, Xm, Xa

madd

madd Wn, Wm, Wa, Wb
madd Xn, Xm, Xa, Xb
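
Illustrative use (the last operand is the addend):

madd w0, w1, w2, w3                          // w0 = w3 + w1 * w2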

mla

mla Vn.H8, Vm.H8, Va.H[uimm]                                              (a is 0-15, #uimm < 8)
mla Vn.H4, Vm.H4, Va.H[uimm]                                              (a is 0-15, #uimm < 8)
mla Vn.S4, Vm.S4, Va.S[uimm]                                                         (#uimm < 4)
mla Vn.S2, Vm.S2, Va.S[uimm]                                                         (#uimm < 4)
mla Vn.B16, Vm.B16, Va.B16
mla Vn.B8, Vm.B8, Va.B8
mla Vn.H8, Vm.H8, Va.H8
mla Vn.H4, Vm.H4, Va.H4
mla Vn.S4, Vm.S4, Va.S4
mla Vn.S2, Vm.S2, Va.S2

mls

mls Vn.H8, Vm.H8, Va.H[uimm]                                              (a is 0-15, #uimm < 8)
mls Vn.H4, Vm.H4, Va.H[uimm]                                              (a is 0-15, #uimm < 8)
mls Vn.S4, Vm.S4, Va.S[uimm]                                                         (#uimm < 4)
mls Vn.S2, Vm.S2, Va.S[uimm]                                                         (#uimm < 4)
mls Vn.B16, Vm.B16, Va.B16
mls Vn.B8, Vm.B8, Va.B8
mls Vn.H8, Vm.H8, Va.H8
mls Vn.H4, Vm.H4, Va.H4
mls Vn.S4, Vm.S4, Va.S4
mls Vn.S2, Vm.S2, Va.S2

mneg

mneg Wn, Wm, Wa
mneg Xn, Xm, Xa

mov

mov Wn, Wm
mov Xn, Xm
mov Wn|WSP, Wm|WSP
mov Xn|SP, Xm|SP
mov Bn, Vm.B[uimm]                                                                  (#uimm < 16)
mov Hn, Vm.H[uimm]                                                                   (#uimm < 8)
mov Sn, Vm.S[uimm]                                                                   (#uimm < 4)
mov Dn, Vm.D[uimm]                                                                   (#uimm < 2)
mov Vn.B[uimm], Vm.B[uimm1]                                            (#uimm < 16, #uimm1 < 16)
mov Vn.H[uimm], Vm.H[uimm1]                                              (#uimm < 8, #uimm1 < 8)
mov Vn.S[uimm], Vm.S[uimm1]                                              (#uimm < 4, #uimm1 < 4)
mov Vn.D[uimm], Vm.D[uimm1]                                              (#uimm < 2, #uimm1 < 2)
mov Vn.B[uimm], Wm                                                                  (#uimm < 16)
mov Vn.H[uimm], Wm                                                                   (#uimm < 8)
mov Vn.S[uimm], Wm                                                                   (#uimm < 4)
mov Vn.D[uimm], Xm                                                                   (#uimm < 2)
mov.inverted Wn, #imm                                                 (#imm is a wide immediate)
mov.inverted Xn, #imm                                                 (#imm is a wide immediate)
mov Wn, #imm                                                          (#imm is a wide immediate)
mov Xn, #imm                                                          (#imm is a wide immediate)
mov Vn.B16, Vm.B16
mov Vn.B8, Vm.B8
mov.logical Wn|WSP, #imm                                           (#imm is a logical immediate)
mov.logical Xn|SP, #imm                                            (#imm is a logical immediate)
mov Wn, Vm.S[uimm]                                                                   (#uimm < 4)
mov Xn, Vm.D[uimm]                                                                   (#uimm < 2)

movi

movi Vn.B16, #uimm {, LSL #uimm1 }                                     (#uimm < 256, #uimm1 = 0)
movi Vn.B8, #uimm {, LSL #uimm1 }                                      (#uimm < 256, #uimm1 = 0)
movi Vn.H8, #uimm {, LSL #uimm1 }                                 (#uimm < 256, #uimm1 = [0, 8])
movi Vn.H4, #uimm {, LSL #uimm1 }                                 (#uimm < 256, #uimm1 = [0, 8])
movi Vn.S4, #uimm {, LSL #uimm1 }                         (#uimm < 256, #uimm1 = [0, 8, 16, 24])
movi Vn.S2, #uimm {, LSL #uimm1 }                         (#uimm < 256, #uimm1 = [0, 8, 16, 24])
movi Vn.S4, #uimm, MSL #uimm1                                    (#uimm < 256, #uimm1 = [8, 16])
movi Vn.S2, #uimm, MSL #uimm1                                    (#uimm < 256, #uimm1 = [8, 16])
movi Dn, #imm                                                    (#imm is a stretched immediate)
movi Vn.D2, #imm                                                 (#imm is a stretched immediate)
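
Illustrative constants (LSL shifts the byte; MSL shifts it in over ones):

movi v0.s4, #1                               // every 32-bit lane = 1
movi v1.s4, #255, LSL #8                     // every lane = 0xFF00
movi v2.s4, #1, MSL #16                      // every lane = 0x0001FFFF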

movk

movk Wn, #uimm {, LSL #uimm1 }                                 (#uimm < 65536, #uimm1 = [0, 16])
movk Xn, #uimm {, LSL #uimm1 }                         (#uimm < 65536, #uimm1 = [0, 16, 32, 48])

movn

movn Wn, #uimm {, LSL #uimm1 }                                 (#uimm < 65536, #uimm1 = [0, 16])
movn Xn, #uimm {, LSL #uimm1 }                         (#uimm < 65536, #uimm1 = [0, 16, 32, 48])

movz

movz Wn, #uimm {, LSL #uimm1 }                                 (#uimm < 65536, #uimm1 = [0, 16])
movz Xn, #uimm {, LSL #uimm1 }                         (#uimm < 65536, #uimm1 = [0, 16, 32, 48])
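
Illustrative sketch of building a wide constant with movz plus the movk form above:

movz x0, #0xBEEF                             // x0 = 0x000000000000BEEF
movk x0, #0xDEAD, LSL #16                    // x0 = 0x00000000DEADBEEF
movk x0, #0xF00D, LSL #32                    // x0 = 0x0000F00DDEADBEEF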

mrs

mrs Xn, #uimm                                                                    (#uimm < 32768)

msr

msr msr_imm_op, #uimm                                                               (#uimm < 16)
msr #uimm, Xn                                                                    (#uimm < 32768)

msub

msub Wn, Wm, Wa, Wb
msub Xn, Xm, Xa, Xb

mul

mul Vn.H8, Vm.H8, Va.H[uimm]                                              (a is 0-15, #uimm < 8)
mul Vn.H4, Vm.H4, Va.H[uimm]                                              (a is 0-15, #uimm < 8)
mul Vn.S4, Vm.S4, Va.S[uimm]                                                         (#uimm < 4)
mul Vn.S2, Vm.S2, Va.S[uimm]                                                         (#uimm < 4)
mul Vn.B16, Vm.B16, Va.B16
mul Vn.B8, Vm.B8, Va.B8
mul Vn.H8, Vm.H8, Va.H8
mul Vn.H4, Vm.H4, Va.H4
mul Vn.S4, Vm.S4, Va.S4
mul Vn.S2, Vm.S2, Va.S2
mul Wn, Wm, Wa
mul Xn, Xm, Xa

mvn

mvn Wn, Wm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 32)
mvn Xn, Xm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 64)
mvn Vn.B16, Vm.B16
mvn Vn.B8, Vm.B8

mvni

mvni Vn.H8, #uimm {, LSL #uimm1 }                                 (#uimm < 256, #uimm1 = [0, 8])
mvni Vn.H4, #uimm {, LSL #uimm1 }                                 (#uimm < 256, #uimm1 = [0, 8])
mvni Vn.S4, #uimm {, LSL #uimm1 }                         (#uimm < 256, #uimm1 = [0, 8, 16, 24])
mvni Vn.S2, #uimm {, LSL #uimm1 }                         (#uimm < 256, #uimm1 = [0, 8, 16, 24])
mvni Vn.S4, #uimm, MSL #uimm1                                    (#uimm < 256, #uimm1 = [8, 16])
mvni Vn.S2, #uimm, MSL #uimm1                                    (#uimm < 256, #uimm1 = [8, 16])

neg

neg Wn, Wm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 32)
neg Xn, Xm {, LSL|LSR|ASR #uimm }                                                   (#uimm < 64)
neg Dn, Dm
neg Vn.B16, Vm.B16
neg Vn.B8, Vm.B8
neg Vn.H8, Vm.H8
neg Vn.H4, Vm.H4
neg Vn.S4, Vm.S4
neg Vn.S2, Vm.S2
neg Vn.D2, Vm.D2

negs

negs Wn, Wm {, LSL|LSR|ASR #uimm }                                                  (#uimm < 32)
negs Xn, Xm {, LSL|LSR|ASR #uimm }                                                  (#uimm < 64)

ngc

ngc Wn, Wm
ngc Xn, Xm

ngcs

ngcs Wn, Wm
ngcs Xn, Xm

nop

nop

not

not Vn.B16, Vm.B16
not Vn.B8, Vm.B8

orn

orn Vn.B16, Vm.B16, Va.B16
orn Vn.B8, Vm.B8, Va.B8
orn Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 32)
orn Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 64)

orr

orr Vn.H8, #uimm {, LSL #uimm1 }                                  (#uimm < 256, #uimm1 = [0, 8])
orr Vn.H4, #uimm {, LSL #uimm1 }                                  (#uimm < 256, #uimm1 = [0, 8])
orr Vn.S4, #uimm {, LSL #uimm1 }                          (#uimm < 256, #uimm1 = [0, 8, 16, 24])
orr Vn.S2, #uimm {, LSL #uimm1 }                          (#uimm < 256, #uimm1 = [0, 8, 16, 24])
orr Vn.B16, Vm.B16, Va.B16
orr Vn.B8, Vm.B8, Va.B8
orr Wn|WSP, Wm, #imm                                               (#imm is a logical immediate)
orr Xn|SP, Xm, #imm                                                (#imm is a logical immediate)
orr Wn, Wm, Wa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 32)
orr Xn, Xm, Xa {, LSL|LSR|ASR|ROR #uimm }                                           (#uimm < 64)

pacda

pacda Xn, Xm|SP

pacdb

pacdb Xn, Xm|SP

pacdza

pacdza Xn

pacdzb

pacdzb Xn

pacga

pacga Xn, Xm, Xa|SP

pacia

pacia Xn, Xm|SP

pacia1716

pacia1716

paciasp

paciasp

paciaz

paciaz

pacib

pacib Xn, Xm|SP

pacib1716

pacib1716

pacibsp

pacibsp

pacibz

pacibz

paciza

paciza Xn

pacizb

pacizb Xn

pmul

pmul Vn.B16, Vm.B16, Va.B16
pmul Vn.B8, Vm.B8, Va.B8

pmull

pmull Vn.H8, Vm.B8, Va.B8
pmull Vn.Q1, Vm.D1, Va.D1

pmull2

pmull2 Vn.H8, Vm.B16, Va.B16
pmull2 Vn.Q1, Vm.D2, Va.D2

prfm

prfm #uimm, <offset>                              (#uimm < 32, offset is 19 bit, 4-byte aligned)
prfm #uimm, [Xn|SP, Wm|Xm { , UXTW|LSL|SXTW|SXTX { #uimm1 } } ]    (#uimm < 32, #uimm1 = [0, 3])

prfum

prfum #uimm, [Xn|SP {, #simm1 } ]                             (#uimm < 32, -256 <= #simm1 < 256)

psb

psb csync

pssbb

pssbb

raddhn

raddhn Vn.B8, Vm.H8, Va.H8
raddhn Vn.H4, Vm.S4, Va.S4
raddhn Vn.S2, Vm.D2, Va.D2

raddhn2

raddhn2 Vn.B16, Vm.H8, Va.H8
raddhn2 Vn.H8, Vm.S4, Va.S4
raddhn2 Vn.S4, Vm.D2, Va.D2

rax1

rax1 Vn.D2, Vm.D2, Va.D2

rbit

rbit Vn.B16, Vm.B16
rbit Vn.B8, Vm.B8
rbit Wn, Wm
rbit Xn, Xm

ret

ret Xn
ret

retaa

retaa
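
Illustrative pairing with paciasp (above): sign the link register on entry, then
authenticate and return in one step.

paciasp                                      // sign LR, using SP as the modifier
                                             // ... function body ...
retaa                                        // authenticate LR with the A key and return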

retab

retab

rev

rev Wn, Wm
rev Xn, Xm
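
Illustrative use alongside rbit (above): rbit reverses bit order, rev byte order.

rbit w0, w1                                  // w0 = bits of w1 reversed
rev  x2, x3                                  // x2 = bytes of x3 reversed (endianness swap)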

rev16

rev16 Vn.B16, Vm.B16
rev16 Vn.B8, Vm.B8
rev16 Wn, Wm
rev16 Xn, Xm

rev32

rev32 Vn.B16, Vm.B16
rev32 Vn.B8, Vm.B8
rev32 Vn.H8, Vm.H8
rev32 Vn.H4, Vm.H4
rev32 Xn, Xm

rev64

rev64 Vn.B16, Vm.B16
rev64 Vn.B8, Vm.B8
rev64 Vn.H8, Vm.H8
rev64 Vn.H4, Vm.H4
rev64 Vn.S4, Vm.S4
rev64 Vn.S2, Vm.S2
rev64 Xn, Xm

rmif

rmif Xn, #uimm, #uimm1                                                 (#uimm < 64, #uimm1 < 16)

ror

ror Wn, Wm, #uimm                                                                   (#uimm < 32)
ror Xn, Xm, #uimm                                                                   (#uimm < 64)
ror Wn, Wm, Wa
ror Xn, Xm, Xa

rorv

rorv Wn, Wm, Wa
rorv Xn, Xm, Xa

rshrn

rshrn Vn.B8, Vm.H8, #uimm                                                      (1 <= #uimm <= 8)
rshrn Vn.H4, Vm.S4, #uimm                                                     (1 <= #uimm <= 16)
rshrn Vn.S2, Vm.D2, #uimm                                                     (1 <= #uimm <= 32)

rshrn2

rshrn2 Vn.B16, Vm.H8, #uimm                                                    (1 <= #uimm <= 8)
rshrn2 Vn.H8, Vm.S4, #uimm                                                    (1 <= #uimm <= 16)
rshrn2 Vn.S4, Vm.D2, #uimm                                                    (1 <= #uimm <= 32)

rsubhn

rsubhn Vn.B8, Vm.H8, Va.H8
rsubhn Vn.H4, Vm.S4, Va.S4
rsubhn Vn.S2, Vm.D2, Va.D2

rsubhn2

rsubhn2 Vn.B16, Vm.H8, Va.H8
rsubhn2 Vn.H8, Vm.S4, Va.S4
rsubhn2 Vn.S4, Vm.D2, Va.D2

saba

saba Vn.B16, Vm.B16, Va.B16
saba Vn.B8, Vm.B8, Va.B8
saba Vn.H8, Vm.H8, Va.H8
saba Vn.H4, Vm.H4, Va.H4
saba Vn.S4, Vm.S4, Va.S4
saba Vn.S2, Vm.S2, Va.S2

sabal

sabal Vn.H8, Vm.B8, Va.B8
sabal Vn.S4, Vm.H4, Va.H4
sabal Vn.D2, Vm.S2, Va.S2

sabal2

sabal2 Vn.H8, Vm.B16, Va.B16
sabal2 Vn.S4, Vm.H8, Va.H8
sabal2 Vn.D2, Vm.S4, Va.S4

sabd

sabd Vn.B16, Vm.B16, Va.B16
sabd Vn.B8, Vm.B8, Va.B8
sabd Vn.H8, Vm.H8, Va.H8
sabd Vn.H4, Vm.H4, Va.H4
sabd Vn.S4, Vm.S4, Va.S4
sabd Vn.S2, Vm.S2, Va.S2

sabdl

sabdl Vn.H8, Vm.B8, Va.B8
sabdl Vn.S4, Vm.H4, Va.H4
sabdl Vn.D2, Vm.S2, Va.S2

sabdl2

sabdl2 Vn.H8, Vm.B16, Va.B16
sabdl2 Vn.S4, Vm.H8, Va.H8
sabdl2 Vn.D2, Vm.S4, Va.S4

sadalp

sadalp Vn.H8, Vm.B16
sadalp Vn.H4, Vm.B8
sadalp Vn.S4, Vm.H8
sadalp Vn.S2, Vm.H4
sadalp Vn.D2, Vm.S4
sadalp Vn.D1, Vm.S2

saddl

saddl Vn.H8, Vm.B8, Va.B8
saddl Vn.S4, Vm.H4, Va.H4
saddl Vn.D2, Vm.S2, Va.S2

saddl2

saddl2 Vn.H8, Vm.B16, Va.B16
saddl2 Vn.S4, Vm.H8, Va.H8
saddl2 Vn.D2, Vm.S4, Va.S4

saddlp

saddlp Vn.H8, Vm.B16
saddlp Vn.H4, Vm.B8
saddlp Vn.S4, Vm.H8
saddlp Vn.S2, Vm.H4
saddlp Vn.D2, Vm.S4
saddlp Vn.D1, Vm.S2

saddlv

saddlv Hn, Vm.B16
saddlv Hn, Vm.B8
saddlv Sn, Vm.H8
saddlv Sn, Vm.H4
saddlv Dn, Vm.S4

saddw

saddw Vn.H8, Vm.H8, Va.B8
saddw Vn.S4, Vm.S4, Va.H4
saddw Vn.D2, Vm.D2, Va.S2

saddw2

saddw2 Vn.H8, Vm.H8, Va.B16
saddw2 Vn.S4, Vm.S4, Va.H8
saddw2 Vn.D2, Vm.D2, Va.S4

sb

sb

sbc

sbc Wn, Wm, Wa
sbc Xn, Xm, Xa

sbcs

sbcs Wn, Wm, Wa
sbcs Xn, Xm, Xa

sbfiz

sbfiz Wn, Wm, #uimm, #uimm1                          (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
sbfiz Xn, Xm, #uimm, #uimm1                          (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)

sbfm

sbfm Wn, Wm, #uimm, #uimm1                                             (#uimm < 32, #uimm1 < 32)
sbfm Xn, Xm, #uimm, #uimm1                                             (#uimm < 64, #uimm1 < 64)

sbfx

sbfx Wn, Wm, #uimm, #uimm1                                (#uimm < 32, 1 <= #uimm1 <= 32 - uimm)
sbfx Xn, Xm, #uimm, #uimm1                                (#uimm < 64, 1 <= #uimm1 <= 64 - uimm)
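
Illustrative uses of the bitfield aliases above:

sbfx  w0, w1, #4, #8                         // w0 = sign_extend(w1[11:4])
sbfiz x2, x3, #12, #20                       // x2 = sign_extend(x3[19:0]) << 12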

scvtf

scvtf Hn, Hm, #uimm                                                           (1 <= #uimm <= 16)
scvtf Sn, Sm, #uimm                                                           (1 <= #uimm <= 32)
scvtf Dn, Dm, #uimm                                                           (1 <= #uimm <= 64)
scvtf Vn.H8, Vm.H8, #uimm                                                     (1 <= #uimm <= 16)
scvtf Vn.H4, Vm.H4, #uimm                                                     (1 <= #uimm <= 16)
scvtf Vn.S4, Vm.S4, #uimm                                                     (1 <= #uimm <= 32)
scvtf Vn.S2, Vm.S2, #uimm                                                     (1 <= #uimm <= 32)
scvtf Vn.D2, Vm.D2, #uimm                                                     (1 <= #uimm <= 64)
scvtf Hn, Hm
scvtf Sn, Sm
scvtf Dn, Dm
scvtf Vn.H8, Vm.H8
scvtf Vn.H4, Vm.H4
scvtf Vn.S4, Vm.S4
scvtf Vn.S2, Vm.S2
scvtf Vn.D2, Vm.D2
scvtf Hn, Wm, #uimm                                                           (1 <= #uimm <= 32)
scvtf Sn, Wm, #uimm                                                           (1 <= #uimm <= 32)
scvtf Dn, Wm, #uimm                                                           (1 <= #uimm <= 32)
scvtf Hn, Xm, #uimm                                                           (1 <= #uimm <= 64)
scvtf Sn, Xm, #uimm                                                           (1 <= #uimm <= 64)
scvtf Dn, Xm, #uimm                                                           (1 <= #uimm <= 64)
scvtf Hn, Wm
scvtf Sn, Wm
scvtf Dn, Wm
scvtf Hn, Xm
scvtf Sn, Xm
scvtf Dn, Xm
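
Illustrative conversions; the immediate gives the number of fractional bits:

scvtf d0, x1                                 // signed 64-bit integer to double
scvtf s2, w3, #16                            // w3 as Q16.16 fixed point to float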

sdiv

sdiv Wn, Wm, Wa
sdiv Xn, Xm, Xa

sdot

sdot Vn.S2, Vm.B8, Va.B4[uimm]                                                       (#uimm < 4)
sdot Vn.S4, Vm.B16, Va.B4[uimm]                                                      (#uimm < 4)
sdot Vn.S2, Vm.B8, Va.B8
sdot Vn.S4, Vm.B16, Va.B16
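
Illustrative use: each S lane accumulates the dot product of four signed bytes.

sdot v0.s4, v1.b16, v2.b16                   // lane-wise groups of four bytes
sdot v0.s4, v1.b16, v2.b4[0]                 // every lane reuses bytes 0-3 of v2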

setf16

setf16 Wn

setf8

setf8 Wn

sev

sev

sevl

sevl

sha1c

sha1c Qn, Sm, Va.S4

sha1h

sha1h Sn, Sm

sha1m

sha1m Qn, Sm, Va.S4

sha1p

sha1p Qn, Sm, Va.S4

sha1su0

sha1su0 Vn.S4, Vm.S4, Va.S4

sha1su1

sha1su1 Vn.S4, Vm.S4

sha256h

sha256h Qn, Qm, Va.S4

sha256h2

sha256h2 Qn, Qm, Va.S4

sha256su0

sha256su0 Vn.S4, Vm.S4

sha256su1

sha256su1 Vn.S4, Vm.S4, Va.S4

sha512h

sha512h Qn, Qm, Va.D2

sha512h2

sha512h2 Qn, Qm, Va.D2

sha512su0

sha512su0 Vn.D2, Vm.D2

sha512su1

sha512su1 Vn.D2, Vm.D2, Va.D2

shadd

shadd Vn.B16, Vm.B16, Va.B16
shadd Vn.B8, Vm.B8, Va.B8
shadd Vn.H8, Vm.H8, Va.H8
shadd Vn.H4, Vm.H4, Va.H4
shadd Vn.S4, Vm.S4, Va.S4
shadd Vn.S2, Vm.S2, Va.S2

shl

shl Dn, Dm, #uimm                                                                   (#uimm < 64)
shl Vn.B16, Vm.B16, #uimm                                                            (#uimm < 8)
shl Vn.B8, Vm.B8, #uimm                                                              (#uimm < 8)
shl Vn.H8, Vm.H8, #uimm                                                             (#uimm < 16)
shl Vn.H4, Vm.H4, #uimm                                                             (#uimm < 16)
shl Vn.S4, Vm.S4, #uimm                                                             (#uimm < 32)
shl Vn.S2, Vm.S2, #uimm                                                             (#uimm < 32)
shl Vn.D2, Vm.D2, #uimm                                                             (#uimm < 64)

shll

shll Vn.H8, Vm.B8, 8
shll Vn.S4, Vm.H4, 16
shll Vn.D2, Vm.S2, 32

shll2

shll2 Vn.H8, Vm.B16, 8
shll2 Vn.S4, Vm.H8, 16
shll2 Vn.D2, Vm.S4, 32

shrn

shrn Vn.B8, Vm.H8, #uimm                                                       (1 <= #uimm <= 8)
shrn Vn.H4, Vm.S4, #uimm                                                      (1 <= #uimm <= 16)
shrn Vn.S2, Vm.D2, #uimm                                                      (1 <= #uimm <= 32)

shrn2

shrn2 Vn.B16, Vm.H8, #uimm                                                     (1 <= #uimm <= 8)
shrn2 Vn.H8, Vm.S4, #uimm                                                     (1 <= #uimm <= 16)
shrn2 Vn.S4, Vm.D2, #uimm                                                     (1 <= #uimm <= 32)

shsub

shsub Vn.B16, Vm.B16, Va.B16
shsub Vn.B8, Vm.B8, Va.B8
shsub Vn.H8, Vm.H8, Va.H8
shsub Vn.H4, Vm.H4, Va.H4
shsub Vn.S4, Vm.S4, Va.S4
shsub Vn.S2, Vm.S2, Va.S2

sli

sli Dn, Dm, #uimm                                                                   (#uimm < 64)
sli Vn.B16, Vm.B16, #uimm                                                            (#uimm < 8)
sli Vn.B8, Vm.B8, #uimm                                                              (#uimm < 8)
sli Vn.H8, Vm.H8, #uimm                                                             (#uimm < 16)
sli Vn.H4, Vm.H4, #uimm                                                             (#uimm < 16)
sli Vn.S4, Vm.S4, #uimm                                                             (#uimm < 32)
sli Vn.S2, Vm.S2, #uimm                                                             (#uimm < 32)
sli Vn.D2, Vm.D2, #uimm                                                             (#uimm < 64)

sm3partw1

sm3partw1 Vn.S4, Vm.S4, Va.S4

sm3partw2

sm3partw2 Vn.S4, Vm.S4, Va.S4

sm3ss1

sm3ss1 Vn.S4, Vm.S4, Va.S4, Vb.S4

sm3tt1a

sm3tt1a Vn.S4, Vm.S4, Va.S[uimm]                                                     (#uimm < 4)

sm3tt1b

sm3tt1b Vn.S4, Vm.S4, Va.S[uimm]                                                     (#uimm < 4)

sm3tt2a

sm3tt2a Vn.S4, Vm.S4, Va.S[uimm]                                                     (#uimm < 4)

sm3tt2b

sm3tt2b Vn.S4, Vm.S4, Va.S[uimm]                                                     (#uimm < 4)

sm4e

sm4e Vn.S4, Vm.S4

sm4ekey

sm4ekey Vn.S4, Vm.S4, Va.S4

smaddl

smaddl Xn, Wm, Wa, Xb

smax

smax Vn.B16, Vm.B16, Va.B16
smax Vn.B8, Vm.B8, Va.B8
smax Vn.H8, Vm.H8, Va.H8
smax Vn.H4, Vm.H4, Va.H4
smax Vn.S4, Vm.S4, Va.S4
smax Vn.S2, Vm.S2, Va.S2

smaxp

smaxp Vn.B16, Vm.B16, Va.B16
smaxp Vn.B8, Vm.B8, Va.B8
smaxp Vn.H8, Vm.H8, Va.H8
smaxp Vn.H4, Vm.H4, Va.H4
smaxp Vn.S4, Vm.S4, Va.S4
smaxp Vn.S2, Vm.S2, Va.S2

smaxv

smaxv Bn, Vm.B16
smaxv Bn, Vm.B8
smaxv Hn, Vm.H8
smaxv Hn, Vm.H4
smaxv Sn, Vm.S4

smc

smc #uimm                                                                        (#uimm < 65536)

smin

smin Vn.B16, Vm.B16, Va.B16
smin Vn.B8, Vm.B8, Va.B8
smin Vn.H8, Vm.H8, Va.H8
smin Vn.H4, Vm.H4, Va.H4
smin Vn.S4, Vm.S4, Va.S4
smin Vn.S2, Vm.S2, Va.S2

sminp

sminp Vn.B16, Vm.B16, Va.B16
sminp Vn.B8, Vm.B8, Va.B8
sminp Vn.H8, Vm.H8, Va.H8
sminp Vn.H4, Vm.H4, Va.H4
sminp Vn.S4, Vm.S4, Va.S4
sminp Vn.S2, Vm.S2, Va.S2

sminv

sminv Bn, Vm.B16
sminv Bn, Vm.B8
sminv Hn, Vm.H8
sminv Hn, Vm.H4
sminv Sn, Vm.S4

smlal

smlal Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
smlal Vn.D2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
smlal Vn.H8, Vm.B8, Va.B8
smlal Vn.S4, Vm.H4, Va.H4
smlal Vn.D2, Vm.S2, Va.S2

smlal2

smlal2 Vn.S4, Vm.H8, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
smlal2 Vn.D2, Vm.S4, Va.S[uimm]                                                      (#uimm < 4)
smlal2 Vn.H8, Vm.B16, Va.B16
smlal2 Vn.S4, Vm.H8, Va.H8
smlal2 Vn.D2, Vm.S4, Va.S4

smlsl

smlsl Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
smlsl Vn.D2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
smlsl Vn.H8, Vm.B8, Va.B8
smlsl Vn.S4, Vm.H4, Va.H4
smlsl Vn.D2, Vm.S2, Va.S2

smlsl2

smlsl2 Vn.S4, Vm.H8, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
smlsl2 Vn.D2, Vm.S4, Va.S[uimm]                                                      (#uimm < 4)
smlsl2 Vn.H8, Vm.B16, Va.B16
smlsl2 Vn.S4, Vm.H8, Va.H8
smlsl2 Vn.D2, Vm.S4, Va.S4

smnegl

smnegl Xn, Wm, Wa

smov

smov Wn, Vm.B[uimm]                                                                 (#uimm < 16)
smov Wn, Vm.H[uimm]                                                                  (#uimm < 8)
smov Xn, Vm.B[uimm]                                                                 (#uimm < 16)
smov Xn, Vm.H[uimm]                                                                  (#uimm < 8)
smov Xn, Vm.S[uimm]                                                                  (#uimm < 4)

smsubl

smsubl Xn, Wm, Wa, Xb

smulh

smulh Xn, Xm, Xa

smull

smull Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
smull Vn.D2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
smull Vn.H8, Vm.B8, Va.B8
smull Vn.S4, Vm.H4, Va.H4
smull Vn.D2, Vm.S2, Va.S2
smull Xn, Wm, Wa
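
Illustrative widening multiplies:

smull x0, w1, w2                             // full 64-bit signed product
smull v0.s4, v1.h4, v2.h4                    // four 16 x 16 -> 32-bit products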

smull2

smull2 Vn.S4, Vm.H8, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
smull2 Vn.D2, Vm.S4, Va.S[uimm]                                                      (#uimm < 4)
smull2 Vn.H8, Vm.B16, Va.B16
smull2 Vn.S4, Vm.H8, Va.H8
smull2 Vn.D2, Vm.S4, Va.S4

sqabs

sqabs Bn, Bm
sqabs Hn, Hm
sqabs Sn, Sm
sqabs Dn, Dm
sqabs Vn.B16, Vm.B16
sqabs Vn.B8, Vm.B8
sqabs Vn.H8, Vm.H8
sqabs Vn.H4, Vm.H4
sqabs Vn.S4, Vm.S4
sqabs Vn.S2, Vm.S2
sqabs Vn.D2, Vm.D2

sqadd

sqadd Bn, Bm, Ba
sqadd Hn, Hm, Ha
sqadd Sn, Sm, Sa
sqadd Dn, Dm, Da
sqadd Vn.B16, Vm.B16, Va.B16
sqadd Vn.B8, Vm.B8, Va.B8
sqadd Vn.H8, Vm.H8, Va.H8
sqadd Vn.H4, Vm.H4, Va.H4
sqadd Vn.S4, Vm.S4, Va.S4
sqadd Vn.S2, Vm.S2, Va.S2
sqadd Vn.D2, Vm.D2, Va.D2

sqdmlal

sqdmlal Sn, Hm, Va.H[uimm]                                                (a is 0-15, #uimm < 8)
sqdmlal Dn, Sm, Va.S[uimm]                                                           (#uimm < 4)
sqdmlal Vn.S4, Vm.H4, Va.H[uimm]                                          (a is 0-15, #uimm < 8)
sqdmlal Vn.D2, Vm.S2, Va.S[uimm]                                                     (#uimm < 4)
sqdmlal Sn, Hm, Ha
sqdmlal Dn, Sm, Sa
sqdmlal Vn.S4, Vm.H4, Va.H4
sqdmlal Vn.D2, Vm.S2, Va.S2

sqdmlal2

sqdmlal2 Vn.S4, Vm.H8, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqdmlal2 Vn.D2, Vm.S4, Va.S[uimm]                                                    (#uimm < 4)
sqdmlal2 Vn.S4, Vm.H8, Va.H8
sqdmlal2 Vn.D2, Vm.S4, Va.S4

sqdmlsl

sqdmlsl Sn, Hm, Va.H[uimm]                                                (a is 0-15, #uimm < 8)
sqdmlsl Dn, Sm, Va.S[uimm]                                                           (#uimm < 4)
sqdmlsl Vn.S4, Vm.H4, Va.H[uimm]                                          (a is 0-15, #uimm < 8)
sqdmlsl Vn.D2, Vm.S2, Va.S[uimm]                                                     (#uimm < 4)
sqdmlsl Sn, Hm, Ha
sqdmlsl Dn, Sm, Sa
sqdmlsl Vn.S4, Vm.H4, Va.H4
sqdmlsl Vn.D2, Vm.S2, Va.S2

sqdmlsl2

sqdmlsl2 Vn.S4, Vm.H8, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqdmlsl2 Vn.D2, Vm.S4, Va.S[uimm]                                                    (#uimm < 4)
sqdmlsl2 Vn.S4, Vm.H8, Va.H8
sqdmlsl2 Vn.D2, Vm.S4, Va.S4

sqdmulh

sqdmulh Hn, Hm, Va.H[uimm]                                                (a is 0-15, #uimm < 8)
sqdmulh Sn, Sm, Va.S[uimm]                                                           (#uimm < 4)
sqdmulh Vn.H8, Vm.H8, Va.H[uimm]                                          (a is 0-15, #uimm < 8)
sqdmulh Vn.H4, Vm.H4, Va.H[uimm]                                          (a is 0-15, #uimm < 8)
sqdmulh Vn.S4, Vm.S4, Va.S[uimm]                                                     (#uimm < 4)
sqdmulh Vn.S2, Vm.S2, Va.S[uimm]                                                     (#uimm < 4)
sqdmulh Hn, Hm, Ha
sqdmulh Sn, Sm, Sa
sqdmulh Vn.H8, Vm.H8, Va.H8
sqdmulh Vn.H4, Vm.H4, Va.H4
sqdmulh Vn.S4, Vm.S4, Va.S4
sqdmulh Vn.S2, Vm.S2, Va.S2

sqdmull

sqdmull Sn, Hm, Va.H[uimm]                                                (a is 0-15, #uimm < 8)
sqdmull Dn, Sm, Va.S[uimm]                                                           (#uimm < 4)
sqdmull Vn.S4, Vm.H4, Va.H[uimm]                                          (a is 0-15, #uimm < 8)
sqdmull Vn.D2, Vm.S2, Va.S[uimm]                                                     (#uimm < 4)
sqdmull Sn, Hm, Ha
sqdmull Dn, Sm, Sa
sqdmull Vn.S4, Vm.H4, Va.H4
sqdmull Vn.D2, Vm.S2, Va.S2

sqdmull2

sqdmull2 Vn.S4, Vm.H8, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqdmull2 Vn.D2, Vm.S4, Va.S[uimm]                                                    (#uimm < 4)
sqdmull2 Vn.S4, Vm.H8, Va.H8
sqdmull2 Vn.D2, Vm.S4, Va.S4

sqneg

sqneg Bn, Bm
sqneg Hn, Hm
sqneg Sn, Sm
sqneg Dn, Dm
sqneg Vn.B16, Vm.B16
sqneg Vn.B8, Vm.B8
sqneg Vn.H8, Vm.H8
sqneg Vn.H4, Vm.H4
sqneg Vn.S4, Vm.S4
sqneg Vn.S2, Vm.S2
sqneg Vn.D2, Vm.D2

sqrdmlah

sqrdmlah Hn, Hm, Va.H[uimm]                                               (a is 0-15, #uimm < 8)
sqrdmlah Sn, Sm, Va.S[uimm]                                                          (#uimm < 4)
sqrdmlah Vn.H8, Vm.H8, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqrdmlah Vn.H4, Vm.H4, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqrdmlah Vn.S4, Vm.S4, Va.S[uimm]                                                    (#uimm < 4)
sqrdmlah Vn.S2, Vm.S2, Va.S[uimm]                                                    (#uimm < 4)
sqrdmlah Hn, Hm, Ha
sqrdmlah Sn, Sm, Sa
sqrdmlah Vn.H8, Vm.H8, Va.H8
sqrdmlah Vn.H4, Vm.H4, Va.H4
sqrdmlah Vn.S4, Vm.S4, Va.S4
sqrdmlah Vn.S2, Vm.S2, Va.S2

sqrdmlsh

sqrdmlsh Hn, Hm, Va.H[uimm]                                               (a is 0-15, #uimm < 8)
sqrdmlsh Sn, Sm, Va.S[uimm]                                                          (#uimm < 4)
sqrdmlsh Vn.H8, Vm.H8, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqrdmlsh Vn.H4, Vm.H4, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqrdmlsh Vn.S4, Vm.S4, Va.S[uimm]                                                    (#uimm < 4)
sqrdmlsh Vn.S2, Vm.S2, Va.S[uimm]                                                    (#uimm < 4)
sqrdmlsh Hn, Hm, Ha
sqrdmlsh Sn, Sm, Sa
sqrdmlsh Vn.H8, Vm.H8, Va.H8
sqrdmlsh Vn.H4, Vm.H4, Va.H4
sqrdmlsh Vn.S4, Vm.S4, Va.S4
sqrdmlsh Vn.S2, Vm.S2, Va.S2

sqrdmulh

sqrdmulh Hn, Hm, Va.H[uimm]                                               (a is 0-15, #uimm < 8)
sqrdmulh Sn, Sm, Va.S[uimm]                                                          (#uimm < 4)
sqrdmulh Vn.H8, Vm.H8, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqrdmulh Vn.H4, Vm.H4, Va.H[uimm]                                         (a is 0-15, #uimm < 8)
sqrdmulh Vn.S4, Vm.S4, Va.S[uimm]                                                    (#uimm < 4)
sqrdmulh Vn.S2, Vm.S2, Va.S[uimm]                                                    (#uimm < 4)
sqrdmulh Hn, Hm, Ha
sqrdmulh Sn, Sm, Sa
sqrdmulh Vn.H8, Vm.H8, Va.H8
sqrdmulh Vn.H4, Vm.H4, Va.H4
sqrdmulh Vn.S4, Vm.S4, Va.S4
sqrdmulh Vn.S2, Vm.S2, Va.S2

sqrshl

sqrshl Bn, Bm, Ba
sqrshl Hn, Hm, Ha
sqrshl Sn, Sm, Sa
sqrshl Dn, Dm, Da
sqrshl Vn.B16, Vm.B16, Va.B16
sqrshl Vn.B8, Vm.B8, Va.B8
sqrshl Vn.H8, Vm.H8, Va.H8
sqrshl Vn.H4, Vm.H4, Va.H4
sqrshl Vn.S4, Vm.S4, Va.S4
sqrshl Vn.S2, Vm.S2, Va.S2
sqrshl Vn.D2, Vm.D2, Va.D2

sqrshrn

sqrshrn Bn, Hm, #uimm                                                          (1 <= #uimm <= 8)
sqrshrn Hn, Sm, #uimm                                                         (1 <= #uimm <= 16)
sqrshrn Sn, Dm, #uimm                                                         (1 <= #uimm <= 32)
sqrshrn Vn.B8, Vm.H8, #uimm                                                    (1 <= #uimm <= 8)
sqrshrn Vn.H4, Vm.S4, #uimm                                                   (1 <= #uimm <= 16)
sqrshrn Vn.S2, Vm.D2, #uimm                                                   (1 <= #uimm <= 32)

sqrshrn2

sqrshrn2 Vn.B16, Vm.H8, #uimm                                                  (1 <= #uimm <= 8)
sqrshrn2 Vn.H8, Vm.S4, #uimm                                                  (1 <= #uimm <= 16)
sqrshrn2 Vn.S4, Vm.D2, #uimm                                                  (1 <= #uimm <= 32)

sqrshrun

sqrshrun Bn, Hm, #uimm                                                         (1 <= #uimm <= 8)
sqrshrun Hn, Sm, #uimm                                                        (1 <= #uimm <= 16)
sqrshrun Sn, Dm, #uimm                                                        (1 <= #uimm <= 32)
sqrshrun Vn.B8, Vm.H8, #uimm                                                   (1 <= #uimm <= 8)
sqrshrun Vn.H4, Vm.S4, #uimm                                                  (1 <= #uimm <= 16)
sqrshrun Vn.S2, Vm.D2, #uimm                                                  (1 <= #uimm <= 32)

sqrshrun2

sqrshrun2 Vn.B16, Vm.H8, #uimm                                                 (1 <= #uimm <= 8)
sqrshrun2 Vn.H8, Vm.S4, #uimm                                                 (1 <= #uimm <= 16)
sqrshrun2 Vn.S4, Vm.D2, #uimm                                                 (1 <= #uimm <= 32)

sqshl

sqshl Bn, Bm, #uimm                                                                  (#uimm < 8)
sqshl Hn, Hm, #uimm                                                                 (#uimm < 16)
sqshl Sn, Sm, #uimm                                                                 (#uimm < 32)
sqshl Dn, Dm, #uimm                                                                 (#uimm < 64)
sqshl Vn.B16, Vm.B16, #uimm                                                          (#uimm < 8)
sqshl Vn.B8, Vm.B8, #uimm                                                            (#uimm < 8)
sqshl Vn.H8, Vm.H8, #uimm                                                           (#uimm < 16)
sqshl Vn.H4, Vm.H4, #uimm                                                           (#uimm < 16)
sqshl Vn.S4, Vm.S4, #uimm                                                           (#uimm < 32)
sqshl Vn.S2, Vm.S2, #uimm                                                           (#uimm < 32)
sqshl Vn.D2, Vm.D2, #uimm                                                           (#uimm < 64)
sqshl Bn, Bm, Ba
sqshl Hn, Hm, Ha
sqshl Sn, Sm, Sa
sqshl Dn, Dm, Da
sqshl Vn.B16, Vm.B16, Va.B16
sqshl Vn.B8, Vm.B8, Va.B8
sqshl Vn.H8, Vm.H8, Va.H8
sqshl Vn.H4, Vm.H4, Va.H4
sqshl Vn.S4, Vm.S4, Va.S4
sqshl Vn.S2, Vm.S2, Va.S2
sqshl Vn.D2, Vm.D2, Va.D2

sqshlu

sqshlu Bn, Bm, #uimm                                                                 (#uimm < 8)
sqshlu Hn, Hm, #uimm                                                                (#uimm < 16)
sqshlu Sn, Sm, #uimm                                                                (#uimm < 32)
sqshlu Dn, Dm, #uimm                                                                (#uimm < 64)
sqshlu Vn.B16, Vm.B16, #uimm                                                         (#uimm < 8)
sqshlu Vn.B8, Vm.B8, #uimm                                                           (#uimm < 8)
sqshlu Vn.H8, Vm.H8, #uimm                                                          (#uimm < 16)
sqshlu Vn.H4, Vm.H4, #uimm                                                          (#uimm < 16)
sqshlu Vn.S4, Vm.S4, #uimm                                                          (#uimm < 32)
sqshlu Vn.S2, Vm.S2, #uimm                                                          (#uimm < 32)
sqshlu Vn.D2, Vm.D2, #uimm                                                          (#uimm < 64)

sqshrn

sqshrn Bn, Hm, #uimm                                                           (1 <= #uimm <= 8)
sqshrn Hn, Sm, #uimm                                                          (1 <= #uimm <= 16)
sqshrn Sn, Dm, #uimm                                                          (1 <= #uimm <= 32)
sqshrn Vn.B8, Vm.H8, #uimm                                                     (1 <= #uimm <= 8)
sqshrn Vn.H4, Vm.S4, #uimm                                                    (1 <= #uimm <= 16)
sqshrn Vn.S2, Vm.D2, #uimm                                                    (1 <= #uimm <= 32)

sqshrn2

sqshrn2 Vn.B16, Vm.H8, #uimm                                                   (1 <= #uimm <= 8)
sqshrn2 Vn.H8, Vm.S4, #uimm                                                   (1 <= #uimm <= 16)
sqshrn2 Vn.S4, Vm.D2, #uimm                                                   (1 <= #uimm <= 32)

sqshrun

sqshrun Bn, Hm, #uimm                                                          (1 <= #uimm <= 8)
sqshrun Hn, Sm, #uimm                                                         (1 <= #uimm <= 16)
sqshrun Sn, Dm, #uimm                                                         (1 <= #uimm <= 32)
sqshrun Vn.B8, Vm.H8, #uimm                                                    (1 <= #uimm <= 8)
sqshrun Vn.H4, Vm.S4, #uimm                                                   (1 <= #uimm <= 16)
sqshrun Vn.S2, Vm.D2, #uimm                                                   (1 <= #uimm <= 32)

sqshrun2

sqshrun2 Vn.B16, Vm.H8, #uimm                                                  (1 <= #uimm <= 8)
sqshrun2 Vn.H8, Vm.S4, #uimm                                                  (1 <= #uimm <= 16)
sqshrun2 Vn.S4, Vm.D2, #uimm                                                  (1 <= #uimm <= 32)

sqsub

sqsub Bn, Bm, Ba
sqsub Hn, Hm, Ha
sqsub Sn, Sm, Sa
sqsub Dn, Dm, Da
sqsub Vn.B16, Vm.B16, Va.B16
sqsub Vn.B8, Vm.B8, Va.B8
sqsub Vn.H8, Vm.H8, Va.H8
sqsub Vn.H4, Vm.H4, Va.H4
sqsub Vn.S4, Vm.S4, Va.S4
sqsub Vn.S2, Vm.S2, Va.S2
sqsub Vn.D2, Vm.D2, Va.D2

sqxtn

sqxtn Bn, Hm
sqxtn Hn, Sm
sqxtn Sn, Dm
sqxtn Vn.B8, Vm.H8
sqxtn Vn.H4, Vm.S4
sqxtn Vn.S2, Vm.D2

sqxtn2

sqxtn2 Vn.B16, Vm.H8
sqxtn2 Vn.H8, Vm.S4
sqxtn2 Vn.S4, Vm.D2
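
Illustrative pairing of the two narrowing forms:

sqxtn  v0.h4, v1.s4                          // saturating narrow into the low half
sqxtn2 v0.h8, v2.s4                          // narrow into the high half, low half kept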

sqxtun

sqxtun Bn, Hm
sqxtun Hn, Sm
sqxtun Sn, Dm
sqxtun Vn.B8, Vm.H8
sqxtun Vn.H4, Vm.S4
sqxtun Vn.S2, Vm.D2

sqxtun2

sqxtun2 Vn.B16, Vm.H8
sqxtun2 Vn.H8, Vm.S4
sqxtun2 Vn.S4, Vm.D2

srhadd

srhadd Vn.B16, Vm.B16, Va.B16
srhadd Vn.B8, Vm.B8, Va.B8
srhadd Vn.H8, Vm.H8, Va.H8
srhadd Vn.H4, Vm.H4, Va.H4
srhadd Vn.S4, Vm.S4, Va.S4
srhadd Vn.S2, Vm.S2, Va.S2

sri

sri Dn, Dm, #uimm                                                             (1 <= #uimm <= 64)
sri Vn.B16, Vm.B16, #uimm                                                      (1 <= #uimm <= 8)
sri Vn.B8, Vm.B8, #uimm                                                        (1 <= #uimm <= 8)
sri Vn.H8, Vm.H8, #uimm                                                       (1 <= #uimm <= 16)
sri Vn.H4, Vm.H4, #uimm                                                       (1 <= #uimm <= 16)
sri Vn.S4, Vm.S4, #uimm                                                       (1 <= #uimm <= 32)
sri Vn.S2, Vm.S2, #uimm                                                       (1 <= #uimm <= 32)
sri Vn.D2, Vm.D2, #uimm                                                       (1 <= #uimm <= 64)

srshl

srshl Dn, Dm, Da
srshl Vn.B16, Vm.B16, Va.B16
srshl Vn.B8, Vm.B8, Va.B8
srshl Vn.H8, Vm.H8, Va.H8
srshl Vn.H4, Vm.H4, Va.H4
srshl Vn.S4, Vm.S4, Va.S4
srshl Vn.S2, Vm.S2, Va.S2
srshl Vn.D2, Vm.D2, Va.D2

srshr

srshr Dn, Dm, #uimm                                                           (1 <= #uimm <= 64)
srshr Vn.B16, Vm.B16, #uimm                                                    (1 <= #uimm <= 8)
srshr Vn.B8, Vm.B8, #uimm                                                      (1 <= #uimm <= 8)
srshr Vn.H8, Vm.H8, #uimm                                                     (1 <= #uimm <= 16)
srshr Vn.H4, Vm.H4, #uimm                                                     (1 <= #uimm <= 16)
srshr Vn.S4, Vm.S4, #uimm                                                     (1 <= #uimm <= 32)
srshr Vn.S2, Vm.S2, #uimm                                                     (1 <= #uimm <= 32)
srshr Vn.D2, Vm.D2, #uimm                                                     (1 <= #uimm <= 64)

srsra

srsra Dn, Dm, #uimm                                                           (1 <= #uimm <= 64)
srsra Vn.B16, Vm.B16, #uimm                                                    (1 <= #uimm <= 8)
srsra Vn.B8, Vm.B8, #uimm                                                      (1 <= #uimm <= 8)
srsra Vn.H8, Vm.H8, #uimm                                                     (1 <= #uimm <= 16)
srsra Vn.H4, Vm.H4, #uimm                                                     (1 <= #uimm <= 16)
srsra Vn.S4, Vm.S4, #uimm                                                     (1 <= #uimm <= 32)
srsra Vn.S2, Vm.S2, #uimm                                                     (1 <= #uimm <= 32)
srsra Vn.D2, Vm.D2, #uimm                                                     (1 <= #uimm <= 64)

ssbb

ssbb

sshl

sshl Dn, Dm, Da
sshl Vn.B16, Vm.B16, Va.B16
sshl Vn.B8, Vm.B8, Va.B8
sshl Vn.H8, Vm.H8, Va.H8
sshl Vn.H4, Vm.H4, Va.H4
sshl Vn.S4, Vm.S4, Va.S4
sshl Vn.S2, Vm.S2, Va.S2
sshl Vn.D2, Vm.D2, Va.D2

sshll

sshll Vn.H8, Vm.B8, #uimm                                                            (#uimm < 8)
sshll Vn.S4, Vm.H4, #uimm                                                           (#uimm < 16)
sshll Vn.D2, Vm.S2, #uimm                                                           (#uimm < 32)

sshll2

sshll2 Vn.H8, Vm.B16, #uimm                                                          (#uimm < 8)
sshll2 Vn.S4, Vm.H8, #uimm                                                          (#uimm < 16)
sshll2 Vn.D2, Vm.S4, #uimm                                                          (#uimm < 32)

sshr

sshr Dn, Dm, #uimm                                                            (1 <= #uimm <= 64)
sshr Vn.B16, Vm.B16, #uimm                                                     (1 <= #uimm <= 8)
sshr Vn.B8, Vm.B8, #uimm                                                       (1 <= #uimm <= 8)
sshr Vn.H8, Vm.H8, #uimm                                                      (1 <= #uimm <= 16)
sshr Vn.H4, Vm.H4, #uimm                                                      (1 <= #uimm <= 16)
sshr Vn.S4, Vm.S4, #uimm                                                      (1 <= #uimm <= 32)
sshr Vn.S2, Vm.S2, #uimm                                                      (1 <= #uimm <= 32)
sshr Vn.D2, Vm.D2, #uimm                                                      (1 <= #uimm <= 64)

ssra

ssra Dn, Dm, #uimm                                                            (1 <= #uimm <= 64)
ssra Vn.B16, Vm.B16, #uimm                                                     (1 <= #uimm <= 8)
ssra Vn.B8, Vm.B8, #uimm                                                       (1 <= #uimm <= 8)
ssra Vn.H8, Vm.H8, #uimm                                                      (1 <= #uimm <= 16)
ssra Vn.H4, Vm.H4, #uimm                                                      (1 <= #uimm <= 16)
ssra Vn.S4, Vm.S4, #uimm                                                      (1 <= #uimm <= 32)
ssra Vn.S2, Vm.S2, #uimm                                                      (1 <= #uimm <= 32)
ssra Vn.D2, Vm.D2, #uimm                                                      (1 <= #uimm <= 64)
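
Illustrative shift-and-accumulate:

sshr v0.h8, v1.h8, #2                        // per-lane arithmetic shift right
ssra v2.h8, v1.h8, #2                        // same shift, result added into v2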

ssubl

ssubl Vn.H8, Vm.B8, Va.B8
ssubl Vn.S4, Vm.H4, Va.H4
ssubl Vn.D2, Vm.S2, Va.S2

ssubl2

ssubl2 Vn.H8, Vm.B16, Va.B16
ssubl2 Vn.S4, Vm.H8, Va.H8
ssubl2 Vn.D2, Vm.S4, Va.S4

ssubw

ssubw Vn.H8, Vm.H8, Va.B8
ssubw Vn.S4, Vm.S4, Va.H4
ssubw Vn.D2, Vm.D2, Va.S2

ssubw2

ssubw2 Vn.H8, Vm.H8, Va.B16
ssubw2 Vn.S4, Vm.S4, Va.H8
ssubw2 Vn.D2, Vm.D2, Va.S4

st1

st1 {Vn.B16 * 1}, [Xm|SP]
st1 {Vn.B8 * 1}, [Xm|SP]
st1 {Vn.H8 * 1}, [Xm|SP]
st1 {Vn.H4 * 1}, [Xm|SP]
st1 {Vn.S4 * 1}, [Xm|SP]
st1 {Vn.S2 * 1}, [Xm|SP]
st1 {Vn.D2 * 1}, [Xm|SP]
st1 {Vn.D1 * 1}, [Xm|SP]
st1 {Vn.B16 * 2}, [Xm|SP]
st1 {Vn.B8 * 2}, [Xm|SP]
st1 {Vn.H8 * 2}, [Xm|SP]
st1 {Vn.H4 * 2}, [Xm|SP]
st1 {Vn.S4 * 2}, [Xm|SP]
st1 {Vn.S2 * 2}, [Xm|SP]
st1 {Vn.D2 * 2}, [Xm|SP]
st1 {Vn.D1 * 2}, [Xm|SP]
st1 {Vn.B16 * 3}, [Xm|SP]
st1 {Vn.B8 * 3}, [Xm|SP]
st1 {Vn.H8 * 3}, [Xm|SP]
st1 {Vn.H4 * 3}, [Xm|SP]
st1 {Vn.S4 * 3}, [Xm|SP]
st1 {Vn.S2 * 3}, [Xm|SP]
st1 {Vn.D2 * 3}, [Xm|SP]
st1 {Vn.D1 * 3}, [Xm|SP]
st1 {Vn.B16 * 4}, [Xm|SP]
st1 {Vn.B8 * 4}, [Xm|SP]
st1 {Vn.H8 * 4}, [Xm|SP]
st1 {Vn.H4 * 4}, [Xm|SP]
st1 {Vn.S4 * 4}, [Xm|SP]
st1 {Vn.S2 * 4}, [Xm|SP]
st1 {Vn.D2 * 4}, [Xm|SP]
st1 {Vn.D1 * 4}, [Xm|SP]
st1 {Vn.B8 * 1}, [Xm|SP], 8
st1 {Vn.H4 * 1}, [Xm|SP], 8
st1 {Vn.S2 * 1}, [Xm|SP], 8
st1 {Vn.D1 * 1}, [Xm|SP], 8
st1 {Vn.B16 * 1}, [Xm|SP], 16
st1 {Vn.H8 * 1}, [Xm|SP], 16
st1 {Vn.S4 * 1}, [Xm|SP], 16
st1 {Vn.D2 * 1}, [Xm|SP], 16
st1 {Vn.B16 * 1}, [Xm|SP], Xa                                                        (a is 0-30)
st1 {Vn.B8 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H8 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H4 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S4 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S2 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D2 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D1 * 1}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.B8 * 2}, [Xm|SP], 16
st1 {Vn.H4 * 2}, [Xm|SP], 16
st1 {Vn.S2 * 2}, [Xm|SP], 16
st1 {Vn.D1 * 2}, [Xm|SP], 16
st1 {Vn.B16 * 2}, [Xm|SP], 32
st1 {Vn.H8 * 2}, [Xm|SP], 32
st1 {Vn.S4 * 2}, [Xm|SP], 32
st1 {Vn.D2 * 2}, [Xm|SP], 32
st1 {Vn.B16 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
st1 {Vn.B8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D1 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.B8 * 3}, [Xm|SP], 24
st1 {Vn.H4 * 3}, [Xm|SP], 24
st1 {Vn.S2 * 3}, [Xm|SP], 24
st1 {Vn.D1 * 3}, [Xm|SP], 24
st1 {Vn.B16 * 3}, [Xm|SP], 48
st1 {Vn.H8 * 3}, [Xm|SP], 48
st1 {Vn.S4 * 3}, [Xm|SP], 48
st1 {Vn.D2 * 3}, [Xm|SP], 48
st1 {Vn.B16 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
st1 {Vn.B8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D1 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.B8 * 4}, [Xm|SP], 32
st1 {Vn.H4 * 4}, [Xm|SP], 32
st1 {Vn.S2 * 4}, [Xm|SP], 32
st1 {Vn.D1 * 4}, [Xm|SP], 32
st1 {Vn.B16 * 4}, [Xm|SP], 64
st1 {Vn.H8 * 4}, [Xm|SP], 64
st1 {Vn.S4 * 4}, [Xm|SP], 64
st1 {Vn.D2 * 4}, [Xm|SP], 64
st1 {Vn.B16 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
st1 {Vn.B8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.H4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.S2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.D1 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st1 {Vn.B * 1}[uimm], [Xm|SP]                                                       (#uimm < 16)
st1 {Vn.H * 1}[uimm], [Xm|SP]                                                        (#uimm < 8)
st1 {Vn.S * 1}[uimm], [Xm|SP]                                                        (#uimm < 4)
st1 {Vn.D * 1}[uimm], [Xm|SP]                                                        (#uimm < 2)
st1 {Vn.B * 1}[uimm], [Xm|SP], 1                                                    (#uimm < 16)
st1 {Vn.B * 1}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
st1 {Vn.H * 1}[uimm], [Xm|SP], 2                                                     (#uimm < 8)
st1 {Vn.H * 1}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
st1 {Vn.S * 1}[uimm], [Xm|SP], 4                                                     (#uimm < 4)
st1 {Vn.S * 1}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
st1 {Vn.D * 1}[uimm], [Xm|SP], 8                                                     (#uimm < 2)
st1 {Vn.D * 1}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)
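
In this listing {Vn.B16 * 4} denotes four consecutive registers; written out they are, e.g., v0-v3. The post-indexed forms support streaming loops, since each store advances the base by the amount written. A minimal copy-loop sketch, assuming x0 = destination, x1 = source, and x2 = a positive byte count that is a multiple of 64 (all hypothetical):

    copy_loop:
        ld1  {v0.16b, v1.16b, v2.16b, v3.16b}, [x1], #64
        st1  {v0.16b, v1.16b, v2.16b, v3.16b}, [x0], #64
        subs x2, x2, #64
        b.ne copy_loop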

st2

st2 {Vn.B16 * 2}, [Xm|SP]
st2 {Vn.B8 * 2}, [Xm|SP]
st2 {Vn.H8 * 2}, [Xm|SP]
st2 {Vn.H4 * 2}, [Xm|SP]
st2 {Vn.S4 * 2}, [Xm|SP]
st2 {Vn.S2 * 2}, [Xm|SP]
st2 {Vn.D2 * 2}, [Xm|SP]
st2 {Vn.B8 * 2}, [Xm|SP], 16
st2 {Vn.H4 * 2}, [Xm|SP], 16
st2 {Vn.S2 * 2}, [Xm|SP], 16
st2 {Vn.B16 * 2}, [Xm|SP], 32
st2 {Vn.H8 * 2}, [Xm|SP], 32
st2 {Vn.S4 * 2}, [Xm|SP], 32
st2 {Vn.D2 * 2}, [Xm|SP], 32
st2 {Vn.B16 * 2}, [Xm|SP], Xa                                                        (a is 0-30)
st2 {Vn.B8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st2 {Vn.H8 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st2 {Vn.H4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st2 {Vn.S4 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st2 {Vn.S2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st2 {Vn.D2 * 2}, [Xm|SP], Xa                                                         (a is 0-30)
st2 {Vn.B * 2}[uimm], [Xm|SP]                                                       (#uimm < 16)
st2 {Vn.H * 2}[uimm], [Xm|SP]                                                        (#uimm < 8)
st2 {Vn.S * 2}[uimm], [Xm|SP]                                                        (#uimm < 4)
st2 {Vn.D * 2}[uimm], [Xm|SP]                                                        (#uimm < 2)
st2 {Vn.B * 2}[uimm], [Xm|SP], 2                                                    (#uimm < 16)
st2 {Vn.B * 2}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
st2 {Vn.H * 2}[uimm], [Xm|SP], 4                                                     (#uimm < 8)
st2 {Vn.H * 2}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
st2 {Vn.S * 2}[uimm], [Xm|SP], 8                                                     (#uimm < 4)
st2 {Vn.S * 2}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
st2 {Vn.D * 2}[uimm], [Xm|SP], 16                                                    (#uimm < 2)
st2 {Vn.D * 2}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)

st3

st3 {Vn.B16 * 3}, [Xm|SP]
st3 {Vn.B8 * 3}, [Xm|SP]
st3 {Vn.H8 * 3}, [Xm|SP]
st3 {Vn.H4 * 3}, [Xm|SP]
st3 {Vn.S4 * 3}, [Xm|SP]
st3 {Vn.S2 * 3}, [Xm|SP]
st3 {Vn.D2 * 3}, [Xm|SP]
st3 {Vn.B8 * 3}, [Xm|SP], 24
st3 {Vn.H4 * 3}, [Xm|SP], 24
st3 {Vn.S2 * 3}, [Xm|SP], 24
st3 {Vn.B16 * 3}, [Xm|SP], 48
st3 {Vn.H8 * 3}, [Xm|SP], 48
st3 {Vn.S4 * 3}, [Xm|SP], 48
st3 {Vn.D2 * 3}, [Xm|SP], 48
st3 {Vn.B16 * 3}, [Xm|SP], Xa                                                        (a is 0-30)
st3 {Vn.B8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st3 {Vn.H8 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st3 {Vn.H4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st3 {Vn.S4 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st3 {Vn.S2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st3 {Vn.D2 * 3}, [Xm|SP], Xa                                                         (a is 0-30)
st3 {Vn.B * 3}[uimm], [Xm|SP]                                                       (#uimm < 16)
st3 {Vn.H * 3}[uimm], [Xm|SP]                                                        (#uimm < 8)
st3 {Vn.S * 3}[uimm], [Xm|SP]                                                        (#uimm < 4)
st3 {Vn.D * 3}[uimm], [Xm|SP]                                                        (#uimm < 2)
st3 {Vn.B * 3}[uimm], [Xm|SP], 3                                                    (#uimm < 16)
st3 {Vn.B * 3}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
st3 {Vn.H * 3}[uimm], [Xm|SP], 6                                                     (#uimm < 8)
st3 {Vn.H * 3}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
st3 {Vn.S * 3}[uimm], [Xm|SP], 12                                                    (#uimm < 4)
st3 {Vn.S * 3}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
st3 {Vn.D * 3}[uimm], [Xm|SP], 24                                                    (#uimm < 2)
st3 {Vn.D * 3}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)

st4

st4 {Vn.B16 * 4}, [Xm|SP]
st4 {Vn.B8 * 4}, [Xm|SP]
st4 {Vn.H8 * 4}, [Xm|SP]
st4 {Vn.H4 * 4}, [Xm|SP]
st4 {Vn.S4 * 4}, [Xm|SP]
st4 {Vn.S2 * 4}, [Xm|SP]
st4 {Vn.D2 * 4}, [Xm|SP]
st4 {Vn.B8 * 4}, [Xm|SP], 32
st4 {Vn.H4 * 4}, [Xm|SP], 32
st4 {Vn.S2 * 4}, [Xm|SP], 32
st4 {Vn.B16 * 4}, [Xm|SP], 64
st4 {Vn.H8 * 4}, [Xm|SP], 64
st4 {Vn.S4 * 4}, [Xm|SP], 64
st4 {Vn.D2 * 4}, [Xm|SP], 64
st4 {Vn.B16 * 4}, [Xm|SP], Xa                                                        (a is 0-30)
st4 {Vn.B8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st4 {Vn.H8 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st4 {Vn.H4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st4 {Vn.S4 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st4 {Vn.S2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st4 {Vn.D2 * 4}, [Xm|SP], Xa                                                         (a is 0-30)
st4 {Vn.B * 4}[uimm], [Xm|SP]                                                       (#uimm < 16)
st4 {Vn.H * 4}[uimm], [Xm|SP]                                                        (#uimm < 8)
st4 {Vn.S * 4}[uimm], [Xm|SP]                                                        (#uimm < 4)
st4 {Vn.D * 4}[uimm], [Xm|SP]                                                        (#uimm < 2)
st4 {Vn.B * 4}[uimm], [Xm|SP], 4                                                    (#uimm < 16)
st4 {Vn.B * 4}[uimm], [Xm|SP], Xa                                        (#uimm < 16, a is 0-30)
st4 {Vn.H * 4}[uimm], [Xm|SP], 8                                                     (#uimm < 8)
st4 {Vn.H * 4}[uimm], [Xm|SP], Xa                                         (#uimm < 8, a is 0-30)
st4 {Vn.S * 4}[uimm], [Xm|SP], 16                                                    (#uimm < 4)
st4 {Vn.S * 4}[uimm], [Xm|SP], Xa                                         (#uimm < 4, a is 0-30)
st4 {Vn.D * 4}[uimm], [Xm|SP], 32                                                    (#uimm < 2)
st4 {Vn.D * 4}[uimm], [Xm|SP], Xa                                         (#uimm < 2, a is 0-30)
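
Unlike st1, st4 interleaves its four source registers element by element, which makes it the natural way to write structure-of-arrays data back as array-of-structures. A sketch that packs four separate byte planes into interleaved 4-byte groups (the plane pointers x1-x4 and output pointer x0 are hypothetical):

    ld1 {v0.16b}, [x1], #16                  // plane 0
    ld1 {v1.16b}, [x2], #16                  // plane 1
    ld1 {v2.16b}, [x3], #16                  // plane 2
    ld1 {v3.16b}, [x4], #16                  // plane 3
    st4 {v0.16b, v1.16b, v2.16b, v3.16b}, [x0], #64   // 0,1,2,3,0,1,2,3,...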

stadd

stadd Wn, [Xm|SP]
stadd Xn, [Xm|SP]
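
stadd is the store-only form of the LSE atomic add: it updates memory atomically and returns nothing, which lets the hardware skip bringing the old value back. A sketch of a statistics counter (x0 = counter address, hypothetical):

    mov   w1, #1
    stadd w1, [x0]        // counter += 1, atomically; use ldadd when the old value is needed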

staddb

staddb Wn, [Xm|SP]

staddh

staddh Wn, [Xm|SP]

staddl

staddl Wn, [Xm|SP]
staddl Xn, [Xm|SP]

staddlb

staddlb Wn, [Xm|SP]

staddlh

staddlh Wn, [Xm|SP]

stclr

stclr Wn, [Xm|SP]
stclr Xn, [Xm|SP]

stclrb

stclrb Wn, [Xm|SP]

stclrh

stclrh Wn, [Xm|SP]

stclrl

stclrl Wn, [Xm|SP]
stclrl Xn, [Xm|SP]

stclrlb

stclrlb Wn, [Xm|SP]

stclrlh

stclrlh Wn, [Xm|SP]

steor

steor Wn, [Xm|SP]
steor Xn, [Xm|SP]

steorb

steorb Wn, [Xm|SP]

steorh

steorh Wn, [Xm|SP]

steorl

steorl Wn, [Xm|SP]
steorl Xn, [Xm|SP]

steorlb

steorlb Wn, [Xm|SP]

steorlh

steorlh Wn, [Xm|SP]

stllr

stllr Wn, [Xm|SP]
stllr Xn, [Xm|SP]

stllrb

stllrb Wn, [Xm|SP]

stllrh

stllrh Wn, [Xm|SP]

stlr

stlr Wn, [Xm|SP]
stlr Xn, [Xm|SP]
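
The release semantics order all earlier writes before the store itself, which is the standard way to publish data to another thread. A sketch, assuming x2 points at the payload slot and x0 at the ready flag (both hypothetical):

    str  x1, [x2]         // write the payload
    mov  w3, #1
    stlr w3, [x0]         // set the flag; a reader pairs this with ldar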

stlrb

stlrb Wn, [Xm|SP]

stlrh

stlrh Wn, [Xm|SP]

stlur

stlur Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)
stlur Xn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

stlurb

stlurb Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

stlurh

stlurh Wn, [Xm|SP {, #simm } ]                                             (-256 <= #simm < 256)

stlxp

stlxp Wn, Wm, Wa, [Xb|SP]
stlxp Wn, Xm, Xa, [Xb|SP]

stlxr

stlxr Wn, Wm, [Xa|SP]
stlxr Wn, Xm, [Xa|SP]
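
The first operand receives a status result: 0 if the exclusive store succeeded, 1 if it lost the reservation and must be retried. A minimal atomic increment using a load-acquire/store-release exclusive pair (register choices hypothetical; the status register must differ from the others):

    retry:
        ldaxr w1, [x0]        // load current value with acquire semantics
        add   w1, w1, #1
        stlxr w2, w1, [x0]    // w2 = 0 on success
        cbnz  w2, retry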

stlxrb

stlxrb Wn, Wm, [Xa|SP]

stlxrh

stlxrh Wn, Wm, [Xa|SP]

stnp

stnp Sn, Sm, [Xa|SP {, #simm } ]                            (-256 <= #simm < 256, #simm = 4 * N)
stnp Dn, Dm, [Xa|SP {, #simm } ]                            (-512 <= #simm < 512, #simm = 8 * N)
stnp Qn, Qm, [Xa|SP {, #simm } ]                         (-1024 <= #simm < 1024, #simm = 16 * N)
stnp Wn, Wm, [Xa|SP {, #simm } ]                            (-256 <= #simm < 256, #simm = 4 * N)
stnp Xn, Xm, [Xa|SP {, #simm } ]                            (-512 <= #simm < 512, #simm = 8 * N)

stp

stp Sn, Sm, [Xa|SP], #simm                                  (-256 <= #simm < 256, #simm = 4 * N)
stp Dn, Dm, [Xa|SP], #simm                                  (-512 <= #simm < 512, #simm = 8 * N)
stp Qn, Qm, [Xa|SP], #simm                               (-1024 <= #simm < 1024, #simm = 16 * N)
stp Sn, Sm, [Xa|SP, #simm]!                                 (-256 <= #simm < 256, #simm = 4 * N)
stp Dn, Dm, [Xa|SP, #simm]!                                 (-512 <= #simm < 512, #simm = 8 * N)
stp Qn, Qm, [Xa|SP, #simm]!                              (-1024 <= #simm < 1024, #simm = 16 * N)
stp Sn, Sm, [Xa|SP {, #simm } ]                             (-256 <= #simm < 256, #simm = 4 * N)
stp Dn, Dm, [Xa|SP {, #simm } ]                             (-512 <= #simm < 512, #simm = 8 * N)
stp Qn, Qm, [Xa|SP {, #simm } ]                          (-1024 <= #simm < 1024, #simm = 16 * N)
stp Wn, Wm, [Xa|SP], #simm                                  (-256 <= #simm < 256, #simm = 4 * N)
stp Xn, Xm, [Xa|SP], #simm                                  (-512 <= #simm < 512, #simm = 8 * N)
stp Wn, Wm, [Xa|SP, #simm]!                                 (-256 <= #simm < 256, #simm = 4 * N)
stp Xn, Xm, [Xa|SP, #simm]!                                 (-512 <= #simm < 512, #simm = 8 * N)
stp Wn, Wm, [Xa|SP {, #simm } ]                             (-256 <= #simm < 256, #simm = 4 * N)
stp Xn, Xm, [Xa|SP {, #simm } ]                             (-512 <= #simm < 512, #simm = 8 * N)
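
The pre-indexed and post-indexed pair forms are the building blocks of the standard AAPCS64 prologue and epilogue. A minimal frame sketch:

    func:
        stp x29, x30, [sp, #-16]!   // push frame pointer and link register
        mov x29, sp
        // ... body ...
        ldp x29, x30, [sp], #16     // pop them on the way out
        ret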

str

str Bn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Hn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Sn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Dn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Qn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Bn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Hn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Sn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Dn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Qn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Bn, [Xm|SP {, #uimm } ]                                                       (#uimm < 4096)
str Hn, [Xm|SP {, #uimm } ]                                        (#uimm < 8192, #uimm = 2 * N)
str Sn, [Xm|SP {, #uimm } ]                                       (#uimm < 16384, #uimm = 4 * N)
str Dn, [Xm|SP {, #uimm } ]                                       (#uimm < 32768, #uimm = 8 * N)
str Qn, [Xm|SP {, #uimm } ]                                      (#uimm < 65536, #uimm = 16 * N)
str Wn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Xn, [Xm|SP], #simm                                                     (-256 <= #simm < 256)
str Wn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Xn, [Xm|SP, #simm]!                                                    (-256 <= #simm < 256)
str Wn, [Xm|SP {, #uimm } ]                                       (#uimm < 16384, #uimm = 4 * N)
str Xn, [Xm|SP {, #uimm } ]                                       (#uimm < 32768, #uimm = 8 * N)
str Bn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 0])
str Hn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 1])
str Sn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 2])
str Dn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 3])
str Qn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 4])
str Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 2])
str Xn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                      (#uimm = [0, 3])
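
The register-offset forms can scale the index by the access size, so an array store needs no separate address arithmetic. A sketch storing a[i] = v for a 32-bit array (x0 = a, w1 = i, w2 = v, all hypothetical):

    str w2, [x0, w1, uxtw #2]    // address = x0 + zero_extend(w1) * 4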

strb

strb Wn, [Xm|SP], #simm                                                    (-256 <= #simm < 256)
strb Wn, [Xm|SP, #simm]!                                                   (-256 <= #simm < 256)
strb Wn, [Xm|SP {, #uimm } ]                                                      (#uimm < 4096)
strb Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                     (#uimm = [0, 0])

strh

strh Wn, [Xm|SP], #simm                                                    (-256 <= #simm < 256)
strh Wn, [Xm|SP, #simm]!                                                   (-256 <= #simm < 256)
strh Wn, [Xm|SP {, #uimm } ]                                       (#uimm < 8192, #uimm = 2 * N)
strh Wn, [Xm|SP, Wa|Xa { , UXTW|LSL|SXTW|SXTX { #uimm } } ]                     (#uimm = [0, 1])

stset

stset Wn, [Xm|SP]
stset Xn, [Xm|SP]

stsetb

stsetb Wn, [Xm|SP]

stseth

stseth Wn, [Xm|SP]

stsetl

stsetl Wn, [Xm|SP]
stsetl Xn, [Xm|SP]

stsetlb

stsetlb Wn, [Xm|SP]

stsetlh

stsetlh Wn, [Xm|SP]

stsmax

stsmax Wn, [Xm|SP]
stsmax Xn, [Xm|SP]

stsmaxb

stsmaxb Wn, [Xm|SP]

stsmaxh

stsmaxh Wn, [Xm|SP]

stsmaxl

stsmaxl Wn, [Xm|SP]
stsmaxl Xn, [Xm|SP]

stsmaxlb

stsmaxlb Wn, [Xm|SP]

stsmaxlh

stsmaxlh Wn, [Xm|SP]

stsmin

stsmin Wn, [Xm|SP]
stsmin Xn, [Xm|SP]

stsminb

stsminb Wn, [Xm|SP]

stsminh

stsminh Wn, [Xm|SP]

stsminl

stsminl Wn, [Xm|SP]
stsminl Xn, [Xm|SP]

stsminlb

stsminlb Wn, [Xm|SP]

stsminlh

stsminlh Wn, [Xm|SP]

sttr

sttr Wn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
sttr Xn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)

sttrb

sttrb Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

sttrh

sttrh Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

stumax

stumax Wn, [Xm|SP]
stumax Xn, [Xm|SP]

stumaxb

stumaxb Wn, [Xm|SP]

stumaxh

stumaxh Wn, [Xm|SP]

stumaxl

stumaxl Wn, [Xm|SP]
stumaxl Xn, [Xm|SP]

stumaxlb

stumaxlb Wn, [Xm|SP]

stumaxlh

stumaxlh Wn, [Xm|SP]

stumin

stumin Wn, [Xm|SP]
stumin Xn, [Xm|SP]

stuminb

stuminb Wn, [Xm|SP]

stuminh

stuminh Wn, [Xm|SP]

stuminl

stuminl Wn, [Xm|SP]
stuminl Xn, [Xm|SP]

stuminlb

stuminlb Wn, [Xm|SP]

stuminlh

stuminlh Wn, [Xm|SP]

stur

stur Bn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
stur Hn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
stur Sn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
stur Dn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
stur Qn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
stur Wn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)
stur Xn, [Xm|SP {, #simm } ]                                               (-256 <= #simm < 256)

sturb

sturb Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

sturh

sturh Wn, [Xm|SP {, #simm } ]                                              (-256 <= #simm < 256)

stxp

stxp Wn, Wm, Wa, [Xb|SP]
stxp Wn, Xm, Xa, [Xb|SP]

stxr

stxr Wn, Wm, [Xa|SP]
stxr Wn, Xm, [Xa|SP]

stxrb

stxrb Wn, Wm, [Xa|SP]

stxrh

stxrh Wn, Wm, [Xa|SP]

sub

sub Wn, Wm, Wa {, LSL|LSR|ASR #uimm }                                               (#uimm < 32)
sub Xn, Xm, Xa {, LSL|LSR|ASR #uimm }                                               (#uimm < 64)
sub Wn|WSP, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm }                      (0 <= #uimm <= 4)
sub Xn|SP, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } }                          (0 <= #uimm <= 4)
sub Xn|SP, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm }                                  (0 <= #uimm <= 4)
sub Wn|WSP, Wm|WSP, #uimm {, LSL #uimm1 }                       (#uimm < 4096, #uimm1 = [0, 12])
sub Xn|SP, Xm|SP, #uimm {, LSL #uimm1 }                         (#uimm < 4096, #uimm1 = [0, 12])
sub Dn, Dm, Da
sub Vn.B16, Vm.B16, Va.B16
sub Vn.B8, Vm.B8, Va.B8
sub Vn.H8, Vm.H8, Va.H8
sub Vn.H4, Vm.H4, Va.H4
sub Vn.S4, Vm.S4, Va.S4
sub Vn.S2, Vm.S2, Va.S2
sub Vn.D2, Vm.D2, Va.D2

subhn

subhn Vn.B8, Vm.H8, Va.H8
subhn Vn.H4, Vm.S4, Va.S4
subhn Vn.S2, Vm.D2, Va.D2

subhn2

subhn2 Vn.B16, Vm.H8, Va.H8
subhn2 Vn.H8, Vm.S4, Va.S4
subhn2 Vn.S4, Vm.D2, Va.D2

subs

subs Wn, Wm, Wa {, LSL|LSR|ASR #uimm }                                              (#uimm < 32)
subs Xn, Xm, Xa {, LSL|LSR|ASR #uimm }                                              (#uimm < 64)
subs Wn, Wm|WSP, Wa {, LSL|UXT[BHWX]|SXT[BHWX] #uimm }                         (0 <= #uimm <= 4)
subs Xn, Xm|SP, Wa {, UXT[BHW]|SXT[BHW] { #uimm } }                            (0 <= #uimm <= 4)
subs Xn, Xm|SP, Xa {, LSL|UXTX|SXTX #uimm }                                    (0 <= #uimm <= 4)
subs Wn, Wm|WSP, #uimm {, LSL #uimm1 }                          (#uimm < 4096, #uimm1 = [0, 12])
subs Xn, Xm|SP, #uimm {, LSL #uimm1 }                           (#uimm < 4096, #uimm1 = [0, 12])

suqadd

suqadd Bn, Bm
suqadd Hn, Hm
suqadd Sn, Sm
suqadd Dn, Dm
suqadd Vn.B16, Vm.B16
suqadd Vn.B8, Vm.B8
suqadd Vn.H8, Vm.H8
suqadd Vn.H4, Vm.H4
suqadd Vn.S4, Vm.S4
suqadd Vn.S2, Vm.S2
suqadd Vn.D2, Vm.D2

svc

svc #uimm                                                                        (#uimm < 65536)

swp

swp Wn, Wm, [Xa|SP]
swp Xn, Xm, [Xa|SP]

swpa

swpa Wn, Wm, [Xa|SP]
swpa Xn, Xm, [Xa|SP]

swpab

swpab Wn, Wm, [Xa|SP]

swpah

swpah Wn, Wm, [Xa|SP]

swpal

swpal Wn, Wm, [Xa|SP]
swpal Xn, Xm, [Xa|SP]

swpalb

swpalb Wn, Wm, [Xa|SP]

swpalh

swpalh Wn, Wm, [Xa|SP]

swpb

swpb Wn, Wm, [Xa|SP]

swph

swph Wn, Wm, [Xa|SP]

swpl

swpl Wn, Wm, [Xa|SP]
swpl Xn, Xm, [Xa|SP]

swplb

swplb Wn, Wm, [Xa|SP]

swplh

swplh Wn, Wm, [Xa|SP]

sxtb

sxtb Wn, Wm
sxtb Xn, Wm

sxth

sxth Wn, Wm
sxth Xn, Wm

sxtl

sxtl Vn.H8, Vm.B8
sxtl Vn.S4, Vm.H4
sxtl Vn.D2, Vm.S2

sxtl2

sxtl2 Vn.H8, Vm.B16
sxtl2 Vn.S4, Vm.H8
sxtl2 Vn.D2, Vm.S4

sxtw

sxtw Xn, Wm

sys

sys #uimm, control_reg, control_reg, #uimm1 {, Xn }                      (#uimm < 8, #uimm1 < 8)

sysl

sysl Xn, #uimm, control_reg, control_reg, #uimm1                         (#uimm < 8, #uimm1 < 8)

tbl

tbl Vn.B16, {Vm.B16 * 2}, Va.B16
tbl Vn.B8, {Vm.B16 * 2}, Va.B8
tbl Vn.B16, {Vm.B16 * 3}, Va.B16
tbl Vn.B8, {Vm.B16 * 3}, Va.B8
tbl Vn.B16, {Vm.B16 * 4}, Va.B16
tbl Vn.B8, {Vm.B16 * 4}, Va.B8
tbl Vn.B16, {Vm.B16 * 1}, Va.B16
tbl Vn.B8, {Vm.B16 * 1}, Va.B8
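
tbl treats the register list as one contiguous byte table and the final operand as indices; any index past the end of the table produces 0 (tbx, below, leaves those destination bytes unchanged instead). A sketch that reverses the bytes of v2, with the index table placed nearby for brevity (label and register choices hypothetical):

    adr x1, rev_idx
    ldr q1, [x1]
    tbl v0.16b, {v2.16b}, v1.16b    // v0 = bytes of v2 in reverse order

    rev_idx:
        .byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0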

tbnz

tbnz Wn, #uimm, <offset>                          (#uimm < 32, offset is 14 bit, 4-byte aligned)
tbnz Xn, #uimm, <offset>                          (#uimm < 64, offset is 14 bit, 4-byte aligned)

tbx

tbx Vn.B16, {Vm.B16 * 2}, Va.B16
tbx Vn.B8, {Vm.B16 * 2}, Va.B8
tbx Vn.B16, {Vm.B16 * 3}, Va.B16
tbx Vn.B8, {Vm.B16 * 3}, Va.B8
tbx Vn.B16, {Vm.B16 * 4}, Va.B16
tbx Vn.B8, {Vm.B16 * 4}, Va.B8
tbx Vn.B16, {Vm.B16 * 1}, Va.B16
tbx Vn.B8, {Vm.B16 * 1}, Va.B8

tbz

tbz Wn, #uimm, <offset>                           (#uimm < 32, offset is 14 bit, 4-byte aligned)
tbz Xn, #uimm, <offset>                           (#uimm < 64, offset is 14 bit, 4-byte aligned)
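
tbz and tbnz fold a single-bit test and a branch into one instruction, without touching the condition flags. A sketch that skips work when bit 3 of a flags word is clear (label and handler hypothetical):

    tbz w0, #3, skip       // branch if bit 3 of w0 is 0
    bl  handle_flag        // hypothetical handler
    skip: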

tlbi

tlbi tlbi_op {, Xn }

trn1

trn1 Vn.B16, Vm.B16, Va.B16
trn1 Vn.B8, Vm.B8, Va.B8
trn1 Vn.H8, Vm.H8, Va.H8
trn1 Vn.H4, Vm.H4, Va.H4
trn1 Vn.S4, Vm.S4, Va.S4
trn1 Vn.S2, Vm.S2, Va.S2
trn1 Vn.D2, Vm.D2, Va.D2

trn2

trn2 Vn.B16, Vm.B16, Va.B16
trn2 Vn.B8, Vm.B8, Va.B8
trn2 Vn.H8, Vm.H8, Va.H8
trn2 Vn.H4, Vm.H4, Va.H4
trn2 Vn.S4, Vm.S4, Va.S4
trn2 Vn.S2, Vm.S2, Va.S2
trn2 Vn.D2, Vm.D2, Va.D2

tsb

tsb csync

tst

tst Wn, #imm                                                       (#imm is a logical immediate)
tst Xn, #imm                                                       (#imm is a logical immediate)
tst Wn, Wm {, LSL|LSR|ASR|ROR #uimm }                                               (#uimm < 32)
tst Xn, Xm {, LSL|LSR|ASR|ROR #uimm }                                               (#uimm < 64)

uaba

uaba Vn.B16, Vm.B16, Va.B16
uaba Vn.B8, Vm.B8, Va.B8
uaba Vn.H8, Vm.H8, Va.H8
uaba Vn.H4, Vm.H4, Va.H4
uaba Vn.S4, Vm.S4, Va.S4
uaba Vn.S2, Vm.S2, Va.S2

uabal

uabal Vn.H8, Vm.B8, Va.B8
uabal Vn.S4, Vm.H4, Va.H4
uabal Vn.D2, Vm.S2, Va.S2

uabal2

uabal2 Vn.H8, Vm.B16, Va.B16
uabal2 Vn.S4, Vm.H8, Va.H8
uabal2 Vn.D2, Vm.S4, Va.S4

uabd

uabd Vn.B16, Vm.B16, Va.B16
uabd Vn.B8, Vm.B8, Va.B8
uabd Vn.H8, Vm.H8, Va.H8
uabd Vn.H4, Vm.H4, Va.H4
uabd Vn.S4, Vm.S4, Va.S4
uabd Vn.S2, Vm.S2, Va.S2

uabdl

uabdl Vn.H8, Vm.B8, Va.B8
uabdl Vn.S4, Vm.H4, Va.H4
uabdl Vn.D2, Vm.S2, Va.S2

uabdl2

uabdl2 Vn.H8, Vm.B16, Va.B16
uabdl2 Vn.S4, Vm.H8, Va.H8
uabdl2 Vn.D2, Vm.S4, Va.S4

uadalp

uadalp Vn.H8, Vm.B16
uadalp Vn.H4, Vm.B8
uadalp Vn.S4, Vm.H8
uadalp Vn.S2, Vm.H4
uadalp Vn.D2, Vm.S4
uadalp Vn.D1, Vm.S2

uaddl

uaddl Vn.H8, Vm.B8, Va.B8
uaddl Vn.S4, Vm.H4, Va.H4
uaddl Vn.D2, Vm.S2, Va.S2

uaddl2

uaddl2 Vn.H8, Vm.B16, Va.B16
uaddl2 Vn.S4, Vm.H8, Va.H8
uaddl2 Vn.D2, Vm.S4, Va.S4

uaddlp

uaddlp Vn.H8, Vm.B16
uaddlp Vn.H4, Vm.B8
uaddlp Vn.S4, Vm.H8
uaddlp Vn.S2, Vm.H4
uaddlp Vn.D2, Vm.S4
uaddlp Vn.D1, Vm.S2

uaddlv

uaddlv Hn, Vm.B16
uaddlv Hn, Vm.B8
uaddlv Sn, Vm.H8
uaddlv Sn, Vm.H4
uaddlv Dn, Vm.S4
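
uaddlv widens while it reduces, so the sum of 16 unsigned bytes (at most 16 * 255 = 4080) fits in the halfword destination without overflow. A sketch summing one 16-byte block (x0 hypothetical):

    ld1    {v0.16b}, [x0]
    uaddlv h1, v0.16b      // h1 = sum of the 16 bytes
    umov   w0, v1.h[0]     // move the scalar result to a general register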

uaddw

uaddw Vn.H8, Vm.H8, Va.B8
uaddw Vn.S4, Vm.S4, Va.H4
uaddw Vn.D2, Vm.D2, Va.S2

uaddw2

uaddw2 Vn.H8, Vm.H8, Va.B16
uaddw2 Vn.S4, Vm.S4, Va.H8
uaddw2 Vn.D2, Vm.D2, Va.S4

ubfiz

ubfiz Wn, Wm, #uimm, #uimm1                          (0 <= #uimm < 32, 1 <= #uimm1 <= 32 - uimm)
ubfiz Xn, Xm, #uimm, #uimm1                          (0 <= #uimm < 64, 1 <= #uimm1 <= 64 - uimm)

ubfm

ubfm Wn, Wm, #uimm, #uimm1                                             (#uimm < 32, #uimm1 < 32)
ubfm Xn, Xm, #uimm, #uimm1                                             (#uimm < 64, #uimm1 < 64)

ubfx

ubfx Wn, Wm, #uimm, #uimm1                                (#uimm < 32, 1 <= #uimm1 <= 32 - uimm)
ubfx Xn, Xm, #uimm, #uimm1                                (#uimm < 64, 1 <= #uimm1 <= 64 - uimm)
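
ubfx extracts #uimm1 bits starting at bit #uimm and zero-extends them; ubfiz (above) is the inverse, depositing a value at that position. A sketch unpacking and repacking an 8-bit field at bits [15:8] (a hypothetical packed-channel layout):

    ubfx  w0, w1, #8, #8     // w0 = bits [15:8] of w1
    ubfiz w2, w3, #8, #8     // w2 = low 8 bits of w3 placed at bits [15:8]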

ucvtf

ucvtf Hn, Hm, #uimm                                                           (1 <= #uimm <= 16)
ucvtf Sn, Sm, #uimm                                                           (1 <= #uimm <= 32)
ucvtf Dn, Dm, #uimm                                                           (1 <= #uimm <= 64)
ucvtf Vn.H8, Vm.H8, #uimm                                                     (1 <= #uimm <= 16)
ucvtf Vn.H4, Vm.H4, #uimm                                                     (1 <= #uimm <= 16)
ucvtf Vn.S4, Vm.S4, #uimm                                                     (1 <= #uimm <= 32)
ucvtf Vn.S2, Vm.S2, #uimm                                                     (1 <= #uimm <= 32)
ucvtf Vn.D2, Vm.D2, #uimm                                                     (1 <= #uimm <= 64)
ucvtf Hn, Hm
ucvtf Sn, Sm
ucvtf Dn, Dm
ucvtf Vn.H8, Vm.H8
ucvtf Vn.H4, Vm.H4
ucvtf Vn.S4, Vm.S4
ucvtf Vn.S2, Vm.S2
ucvtf Vn.D2, Vm.D2
ucvtf Hn, Wm, #uimm                                                           (1 <= #uimm <= 32)
ucvtf Sn, Wm, #uimm                                                           (1 <= #uimm <= 32)
ucvtf Dn, Wm, #uimm                                                           (1 <= #uimm <= 32)
ucvtf Hn, Xm, #uimm                                                           (1 <= #uimm <= 64)
ucvtf Sn, Xm, #uimm                                                           (1 <= #uimm <= 64)
ucvtf Dn, Xm, #uimm                                                           (1 <= #uimm <= 64)
ucvtf Hn, Wm
ucvtf Sn, Wm
ucvtf Dn, Wm
ucvtf Hn, Xm
ucvtf Sn, Xm
ucvtf Dn, Xm
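
The two-operand forms convert an unsigned integer to floating point; the three-operand forms additionally treat the source as fixed point with #uimm fractional bits. A sketch (registers hypothetical):

    ucvtf s0, w0           // s0 = float(w0)
    ucvtf d1, x1, #16      // treat x1 as fixed point with 16 fractional bits, convert to double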

udf

udf #uimm                                                                        (#uimm < 65536)

udiv

udiv Wn, Wm, Wa
udiv Xn, Xm, Xa

udot

udot Vn.S2, Vm.B8, Va.B4[uimm]                                                       (#uimm < 4)
udot Vn.S4, Vm.B16, Va.B4[uimm]                                                      (#uimm < 4)
udot Vn.S2, Vm.B8, Va.B8
udot Vn.S4, Vm.B16, Va.B16
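
udot (part of the optional dot-product extension) multiplies four unsigned byte pairs per lane and accumulates each group into a 32-bit lane, so the intermediate products cannot overflow. A sketch of a 16-byte dot product, reduced to a scalar at the end (registers hypothetical):

    movi v0.4s, #0
    udot v0.4s, v1.16b, v2.16b   // each S lane += dot product of its 4 byte pairs
    addv s0, v0.4s               // horizontal add of the 4 partial sums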

uhadd

uhadd Vn.B16, Vm.B16, Va.B16
uhadd Vn.B8, Vm.B8, Va.B8
uhadd Vn.H8, Vm.H8, Va.H8
uhadd Vn.H4, Vm.H4, Va.H4
uhadd Vn.S4, Vm.S4, Va.S4
uhadd Vn.S2, Vm.S2, Va.S2

uhsub

uhsub Vn.B16, Vm.B16, Va.B16
uhsub Vn.B8, Vm.B8, Va.B8
uhsub Vn.H8, Vm.H8, Va.H8
uhsub Vn.H4, Vm.H4, Va.H4
uhsub Vn.S4, Vm.S4, Va.S4
uhsub Vn.S2, Vm.S2, Va.S2

umaddl

umaddl Xn, Wm, Wa, Xb
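
umaddl computes the full 64-bit product of two 32-bit operands and adds it to a 64-bit accumulator, so no intermediate truncation occurs. A sketch (registers hypothetical):

    umaddl x0, w1, w2, x0    // x0 += (u64)w1 * (u64)w2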

umax

umax Vn.B16, Vm.B16, Va.B16
umax Vn.B8, Vm.B8, Va.B8
umax Vn.H8, Vm.H8, Va.H8
umax Vn.H4, Vm.H4, Va.H4
umax Vn.S4, Vm.S4, Va.S4
umax Vn.S2, Vm.S2, Va.S2

umaxp

umaxp Vn.B16, Vm.B16, Va.B16
umaxp Vn.B8, Vm.B8, Va.B8
umaxp Vn.H8, Vm.H8, Va.H8
umaxp Vn.H4, Vm.H4, Va.H4
umaxp Vn.S4, Vm.S4, Va.S4
umaxp Vn.S2, Vm.S2, Va.S2

umaxv

umaxv Bn, Vm.B16
umaxv Bn, Vm.B8
umaxv Hn, Vm.H8
umaxv Hn, Vm.H4
umaxv Sn, Vm.S4

umin

umin Vn.B16, Vm.B16, Va.B16
umin Vn.B8, Vm.B8, Va.B8
umin Vn.H8, Vm.H8, Va.H8
umin Vn.H4, Vm.H4, Va.H4
umin Vn.S4, Vm.S4, Va.S4
umin Vn.S2, Vm.S2, Va.S2

uminp

uminp Vn.B16, Vm.B16, Va.B16
uminp Vn.B8, Vm.B8, Va.B8
uminp Vn.H8, Vm.H8, Va.H8
uminp Vn.H4, Vm.H4, Va.H4
uminp Vn.S4, Vm.S4, Va.S4
uminp Vn.S2, Vm.S2, Va.S2

uminv

uminv Bn, Vm.B16
uminv Bn, Vm.B8
uminv Hn, Vm.H8
uminv Hn, Vm.H4
uminv Sn, Vm.S4

umlal

umlal Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
umlal Vn.D2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
umlal Vn.H8, Vm.B8, Va.B8
umlal Vn.S4, Vm.H4, Va.H4
umlal Vn.D2, Vm.S2, Va.S2

umlal2

umlal2 Vn.S4, Vm.H8, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
umlal2 Vn.D2, Vm.S4, Va.S[uimm]                                                      (#uimm < 4)
umlal2 Vn.H8, Vm.B16, Va.B16
umlal2 Vn.S4, Vm.H8, Va.H8
umlal2 Vn.D2, Vm.S4, Va.S4

umlsl

umlsl Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
umlsl Vn.D2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
umlsl Vn.H8, Vm.B8, Va.B8
umlsl Vn.S4, Vm.H4, Va.H4
umlsl Vn.D2, Vm.S2, Va.S2

umlsl2

umlsl2 Vn.S4, Vm.H8, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
umlsl2 Vn.D2, Vm.S4, Va.S[uimm]                                                      (#uimm < 4)
umlsl2 Vn.H8, Vm.B16, Va.B16
umlsl2 Vn.S4, Vm.H8, Va.H8
umlsl2 Vn.D2, Vm.S4, Va.S4

umnegl

umnegl Xn, Wm, Wa

umov

umov Wn, Vm.B[uimm]                                                                 (#uimm < 16)
umov Wn, Vm.H[uimm]                                                                  (#uimm < 8)
umov Wn, Vm.S[uimm]                                                                  (#uimm < 4)
umov Xn, Vm.D[uimm]                                                                  (#uimm < 2)

umsubl

umsubl Xn, Wm, Wa, Xb

umulh

umulh Xn, Xm, Xa

umull

umull Vn.S4, Vm.H4, Va.H[uimm]                                            (a is 0-15, #uimm < 8)
umull Vn.D2, Vm.S2, Va.S[uimm]                                                       (#uimm < 4)
umull Vn.H8, Vm.B8, Va.B8
umull Vn.S4, Vm.H4, Va.H4
umull Vn.D2, Vm.S2, Va.S2
umull Xn, Wm, Wa

umull2

umull2 Vn.S4, Vm.H8, Va.H[uimm]                                           (a is 0-15, #uimm < 8)
umull2 Vn.D2, Vm.S4, Va.S[uimm]                                                      (#uimm < 4)
umull2 Vn.H8, Vm.B16, Va.B16
umull2 Vn.S4, Vm.H8, Va.H8
umull2 Vn.D2, Vm.S4, Va.S4

uqadd

uqadd Bn, Bm, Ba
uqadd Hn, Hm, Ha
uqadd Sn, Sm, Sa
uqadd Dn, Dm, Da
uqadd Vn.B16, Vm.B16, Va.B16
uqadd Vn.B8, Vm.B8, Va.B8
uqadd Vn.H8, Vm.H8, Va.H8
uqadd Vn.H4, Vm.H4, Va.H4
uqadd Vn.S4, Vm.S4, Va.S4
uqadd Vn.S2, Vm.S2, Va.S2
uqadd Vn.D2, Vm.D2, Va.D2

uqrshl

uqrshl Bn, Bm, Ba
uqrshl Hn, Hm, Ha
uqrshl Sn, Sm, Sa
uqrshl Dn, Dm, Da
uqrshl Vn.B16, Vm.B16, Va.B16
uqrshl Vn.B8, Vm.B8, Va.B8
uqrshl Vn.H8, Vm.H8, Va.H8
uqrshl Vn.H4, Vm.H4, Va.H4
uqrshl Vn.S4, Vm.S4, Va.S4
uqrshl Vn.S2, Vm.S2, Va.S2
uqrshl Vn.D2, Vm.D2, Va.D2

uqrshrn

uqrshrn Bn, Hm, #uimm                                                          (1 <= #uimm <= 8)
uqrshrn Hn, Sm, #uimm                                                         (1 <= #uimm <= 16)
uqrshrn Sn, Dm, #uimm                                                         (1 <= #uimm <= 32)
uqrshrn Vn.B8, Vm.H8, #uimm                                                    (1 <= #uimm <= 8)
uqrshrn Vn.H4, Vm.S4, #uimm                                                   (1 <= #uimm <= 16)
uqrshrn Vn.S2, Vm.D2, #uimm                                                   (1 <= #uimm <= 32)

uqrshrn2

uqrshrn2 Vn.B16, Vm.H8, #uimm                                                  (1 <= #uimm <= 8)
uqrshrn2 Vn.H8, Vm.S4, #uimm                                                  (1 <= #uimm <= 16)
uqrshrn2 Vn.S4, Vm.D2, #uimm                                                  (1 <= #uimm <= 32)

uqshl

uqshl Bn, Bm, #uimm                                                                  (#uimm < 8)
uqshl Hn, Hm, #uimm                                                                 (#uimm < 16)
uqshl Sn, Sm, #uimm                                                                 (#uimm < 32)
uqshl Dn, Dm, #uimm                                                                 (#uimm < 64)
uqshl Vn.B16, Vm.B16, #uimm                                                          (#uimm < 8)
uqshl Vn.B8, Vm.B8, #uimm                                                            (#uimm < 8)
uqshl Vn.H8, Vm.H8, #uimm                                                           (#uimm < 16)
uqshl Vn.H4, Vm.H4, #uimm                                                           (#uimm < 16)
uqshl Vn.S4, Vm.S4, #uimm                                                           (#uimm < 32)
uqshl Vn.S2, Vm.S2, #uimm                                                           (#uimm < 32)
uqshl Vn.D2, Vm.D2, #uimm                                                           (#uimm < 64)
uqshl Bn, Bm, Ba
uqshl Hn, Hm, Ha
uqshl Sn, Sm, Sa
uqshl Dn, Dm, Da
uqshl Vn.B16, Vm.B16, Va.B16
uqshl Vn.B8, Vm.B8, Va.B8
uqshl Vn.H8, Vm.H8, Va.H8
uqshl Vn.H4, Vm.H4, Va.H4
uqshl Vn.S4, Vm.S4, Va.S4
uqshl Vn.S2, Vm.S2, Va.S2
uqshl Vn.D2, Vm.D2, Va.D2

uqshrn

uqshrn Bn, Hm, #uimm                                                           (1 <= #uimm <= 8)
uqshrn Hn, Sm, #uimm                                                          (1 <= #uimm <= 16)
uqshrn Sn, Dm, #uimm                                                          (1 <= #uimm <= 32)
uqshrn Vn.B8, Vm.H8, #uimm                                                     (1 <= #uimm <= 8)
uqshrn Vn.H4, Vm.S4, #uimm                                                    (1 <= #uimm <= 16)
uqshrn Vn.S2, Vm.D2, #uimm                                                    (1 <= #uimm <= 32)

uqshrn2

uqshrn2 Vn.B16, Vm.H8, #uimm                                                   (1 <= #uimm <= 8)
uqshrn2 Vn.H8, Vm.S4, #uimm                                                   (1 <= #uimm <= 16)
uqshrn2 Vn.S4, Vm.D2, #uimm                                                   (1 <= #uimm <= 32)

uqsub

uqsub Bn, Bm, Ba
uqsub Hn, Hm, Ha
uqsub Sn, Sm, Sa
uqsub Dn, Dm, Da
uqsub Vn.B16, Vm.B16, Va.B16
uqsub Vn.B8, Vm.B8, Va.B8
uqsub Vn.H8, Vm.H8, Va.H8
uqsub Vn.H4, Vm.H4, Va.H4
uqsub Vn.S4, Vm.S4, Va.S4
uqsub Vn.S2, Vm.S2, Va.S2
uqsub Vn.D2, Vm.D2, Va.D2

uqxtn

uqxtn Bn, Hm
uqxtn Hn, Sm
uqxtn Sn, Dm
uqxtn Vn.B8, Vm.H8
uqxtn Vn.H4, Vm.S4
uqxtn Vn.S2, Vm.D2

uqxtn2

uqxtn2 Vn.B16, Vm.H8
uqxtn2 Vn.H8, Vm.S4
uqxtn2 Vn.S4, Vm.D2
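
The 2 variant writes the upper half of the destination, so a pair of these narrows two full-width sources into one vector with unsigned saturation, e.g. 16-bit intermediates back to 8-bit pixels. A sketch (registers hypothetical):

    uqxtn  v0.8b,  v1.8h    // lower 8 bytes, clamped to 0..255
    uqxtn2 v0.16b, v2.8h    // upper 8 bytes from a second source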

urecpe

urecpe Vn.S4, Vm.S4
urecpe Vn.S2, Vm.S2

urhadd

urhadd Vn.B16, Vm.B16, Va.B16
urhadd Vn.B8, Vm.B8, Va.B8
urhadd Vn.H8, Vm.H8, Va.H8
urhadd Vn.H4, Vm.H4, Va.H4
urhadd Vn.S4, Vm.S4, Va.S4
urhadd Vn.S2, Vm.S2, Va.S2

urshl

urshl Dn, Dm, Da
urshl Vn.B16, Vm.B16, Va.B16
urshl Vn.B8, Vm.B8, Va.B8
urshl Vn.H8, Vm.H8, Va.H8
urshl Vn.H4, Vm.H4, Va.H4
urshl Vn.S4, Vm.S4, Va.S4
urshl Vn.S2, Vm.S2, Va.S2
urshl Vn.D2, Vm.D2, Va.D2

urshr

urshr Dn, Dm, #uimm                                                           (1 <= #uimm <= 64)
urshr Vn.B16, Vm.B16, #uimm                                                    (1 <= #uimm <= 8)
urshr Vn.B8, Vm.B8, #uimm                                                      (1 <= #uimm <= 8)
urshr Vn.H8, Vm.H8, #uimm                                                     (1 <= #uimm <= 16)
urshr Vn.H4, Vm.H4, #uimm                                                     (1 <= #uimm <= 16)
urshr Vn.S4, Vm.S4, #uimm                                                     (1 <= #uimm <= 32)
urshr Vn.S2, Vm.S2, #uimm                                                     (1 <= #uimm <= 32)
urshr Vn.D2, Vm.D2, #uimm                                                     (1 <= #uimm <= 64)

ursqrte

ursqrte Vn.S4, Vm.S4
ursqrte Vn.S2, Vm.S2

ursra

ursra Dn, Dm, #uimm                                                           (1 <= #uimm <= 64)
ursra Vn.B16, Vm.B16, #uimm                                                    (1 <= #uimm <= 8)
ursra Vn.B8, Vm.B8, #uimm                                                      (1 <= #uimm <= 8)
ursra Vn.H8, Vm.H8, #uimm                                                     (1 <= #uimm <= 16)
ursra Vn.H4, Vm.H4, #uimm                                                     (1 <= #uimm <= 16)
ursra Vn.S4, Vm.S4, #uimm                                                     (1 <= #uimm <= 32)
ursra Vn.S2, Vm.S2, #uimm                                                     (1 <= #uimm <= 32)
ursra Vn.D2, Vm.D2, #uimm                                                     (1 <= #uimm <= 64)

ushl

ushl Dn, Dm, Da
ushl Vn.B16, Vm.B16, Va.B16
ushl Vn.B8, Vm.B8, Va.B8
ushl Vn.H8, Vm.H8, Va.H8
ushl Vn.H4, Vm.H4, Va.H4
ushl Vn.S4, Vm.S4, Va.S4
ushl Vn.S2, Vm.S2, Va.S2
ushl Vn.D2, Vm.D2, Va.D2

ushll

ushll Vn.H8, Vm.B8, #uimm                                                            (#uimm < 8)
ushll Vn.S4, Vm.H4, #uimm                                                           (#uimm < 16)
ushll Vn.D2, Vm.S2, #uimm                                                           (#uimm < 32)

ushll2

ushll2 Vn.H8, Vm.B16, #uimm                                                          (#uimm < 8)
ushll2 Vn.S4, Vm.H8, #uimm                                                          (#uimm < 16)
ushll2 Vn.D2, Vm.S4, #uimm                                                          (#uimm < 32)

ushr

ushr Dn, Dm, #uimm                                                            (1 <= #uimm <= 64)
ushr Vn.B16, Vm.B16, #uimm                                                     (1 <= #uimm <= 8)
ushr Vn.B8, Vm.B8, #uimm                                                       (1 <= #uimm <= 8)
ushr Vn.H8, Vm.H8, #uimm                                                      (1 <= #uimm <= 16)
ushr Vn.H4, Vm.H4, #uimm                                                      (1 <= #uimm <= 16)
ushr Vn.S4, Vm.S4, #uimm                                                      (1 <= #uimm <= 32)
ushr Vn.S2, Vm.S2, #uimm                                                      (1 <= #uimm <= 32)
ushr Vn.D2, Vm.D2, #uimm                                                      (1 <= #uimm <= 64)

usqadd

usqadd Bn, Bm
usqadd Hn, Hm
usqadd Sn, Sm
usqadd Dn, Dm
usqadd Vn.B16, Vm.B16
usqadd Vn.B8, Vm.B8
usqadd Vn.H8, Vm.H8
usqadd Vn.H4, Vm.H4
usqadd Vn.S4, Vm.S4
usqadd Vn.S2, Vm.S2
usqadd Vn.D2, Vm.D2

usra

usra Dn, Dm, #uimm                                                            (1 <= #uimm <= 64)
usra Vn.B16, Vm.B16, #uimm                                                     (1 <= #uimm <= 8)
usra Vn.B8, Vm.B8, #uimm                                                       (1 <= #uimm <= 8)
usra Vn.H8, Vm.H8, #uimm                                                      (1 <= #uimm <= 16)
usra Vn.H4, Vm.H4, #uimm                                                      (1 <= #uimm <= 16)
usra Vn.S4, Vm.S4, #uimm                                                      (1 <= #uimm <= 32)
usra Vn.S2, Vm.S2, #uimm                                                      (1 <= #uimm <= 32)
usra Vn.D2, Vm.D2, #uimm                                                      (1 <= #uimm <= 64)

usubl

usubl Vn.H8, Vm.B8, Va.B8
usubl Vn.S4, Vm.H4, Va.H4
usubl Vn.D2, Vm.S2, Va.S2

usubl2

usubl2 Vn.H8, Vm.B16, Va.B16
usubl2 Vn.S4, Vm.H8, Va.H8
usubl2 Vn.D2, Vm.S4, Va.S4

usubw

usubw Vn.H8, Vm.H8, Va.B8
usubw Vn.S4, Vm.S4, Va.H4
usubw Vn.D2, Vm.D2, Va.S2

usubw2

usubw2 Vn.H8, Vm.H8, Va.B16
usubw2 Vn.S4, Vm.S4, Va.H8
usubw2 Vn.D2, Vm.D2, Va.S4

uxtb

uxtb Wn, Wm

uxth

uxth Wn, Wm

uxtl

uxtl Vn.H8, Vm.B8
uxtl Vn.S4, Vm.H4
uxtl Vn.D2, Vm.S2

uxtl2

uxtl2 Vn.H8, Vm.B16
uxtl2 Vn.S4, Vm.H8
uxtl2 Vn.D2, Vm.S4

uzp1

uzp1 Vn.B16, Vm.B16, Va.B16
uzp1 Vn.B8, Vm.B8, Va.B8
uzp1 Vn.H8, Vm.H8, Va.H8
uzp1 Vn.H4, Vm.H4, Va.H4
uzp1 Vn.S4, Vm.S4, Va.S4
uzp1 Vn.S2, Vm.S2, Va.S2
uzp1 Vn.D2, Vm.D2, Va.D2

uzp2

uzp2 Vn.B16, Vm.B16, Va.B16
uzp2 Vn.B8, Vm.B8, Va.B8
uzp2 Vn.H8, Vm.H8, Va.H8
uzp2 Vn.H4, Vm.H4, Va.H4
uzp2 Vn.S4, Vm.S4, Va.S4
uzp2 Vn.S2, Vm.S2, Va.S2
uzp2 Vn.D2, Vm.D2, Va.D2

wfe

wfe

wfi

wfi

xar

xar Vn.D2, Vm.D2, Va.D2, #uimm                                                      (#uimm < 64)

xpacd

xpacd Xn

xpaci

xpaci Xn

xpaclri

xpaclri

xtn

xtn Vn.B8, Vm.H8
xtn Vn.H4, Vm.S4
xtn Vn.S2, Vm.D2

xtn2

xtn2 Vn.B16, Vm.H8
xtn2 Vn.H8, Vm.S4
xtn2 Vn.S4, Vm.D2

yield

yield

zip1

zip1 Vn.B16, Vm.B16, Va.B16
zip1 Vn.B8, Vm.B8, Va.B8
zip1 Vn.H8, Vm.H8, Va.H8
zip1 Vn.H4, Vm.H4, Va.H4
zip1 Vn.S4, Vm.S4, Va.S4
zip1 Vn.S2, Vm.S2, Va.S2
zip1 Vn.D2, Vm.D2, Va.D2

zip2

zip2 Vn.B16, Vm.B16, Va.B16
zip2 Vn.B8, Vm.B8, Va.B8
zip2 Vn.H8, Vm.H8, Va.H8
zip2 Vn.H4, Vm.H4, Va.H4
zip2 Vn.S4, Vm.S4, Va.S4
zip2 Vn.S2, Vm.S2, Va.S2
zip2 Vn.D2, Vm.D2, Va.D2
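
zip1/zip2 interleave the low and high halves of two vectors, and uzp1/uzp2 (above) invert the operation; together they handle array-of-structures conversions entirely in registers. A sketch de-interleaving and re-interleaving 16-bit stereo samples (registers hypothetical):

    uzp1 v2.8h, v0.8h, v1.8h   // even lanes: left channel
    uzp2 v3.8h, v0.8h, v1.8h   // odd lanes: right channel
    zip1 v4.8h, v2.8h, v3.8h   // re-interleave the low halves
    zip2 v5.8h, v2.8h, v3.8h   // re-interleave the high halves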