use syn::spanned::Spanned;
use proc_macro2::{Span, TokenTree, Literal};
use quote::{quote_spanned};
use proc_macro_error2::emit_error;
use crate::common::{Stmt, Size, Jump, JumpKind, delimited};
use crate::serialize;
use super::{Context, X86Mode};
use super::ast::{RawArg, CleanArg, SizedArg, Instruction, MemoryRefItem, Register, RegKind, RegFamily, RegId};
use super::x64data::get_mnemnonic_data;
use super::x64data::Flags;
use super::x64data::Features;
use super::debug::format_opdata_list;
use std::mem::swap;
use std::slice;
use std::iter;
/// One instruction encoding entry from the generated opcode tables.
#[derive(Debug)]
pub struct Opdata {
    /// `args`: format string of (type, size) byte pairs describing the accepted
    /// operands; `ops`: the literal opcode bytes to emit for this encoding.
    pub args: &'static [u8], pub ops: &'static [u8],
    /// Fixed value for the ModRM.reg field (opcode extension) when no register
    /// operand occupies it.
    pub reg: u8,
    /// Encoding flags (mandatory prefixes, operand ordering, auto-sizing, ...).
    pub flags: Flags,
    /// CPU feature set this encoding requires.
    pub features: Features
}
/// Iterator over an `Opdata` format string, yielding `(type, size)` byte pairs.
pub struct FormatStringIterator<'a> {
    // Cloned byte iterator over the raw format string.
    inner: iter::Cloned<slice::Iter<'a, u8>>
}
impl<'a> FormatStringIterator<'a> {
    /// Wraps a raw format string for iteration in `(type, size)` pairs.
    pub fn new(buf: &'a [u8]) -> FormatStringIterator<'a> {
        let inner = buf.iter().cloned();
        FormatStringIterator { inner }
    }
}
impl<'a> Iterator for FormatStringIterator<'a> {
    type Item = (u8, u8);

    /// Yields the next `(type, size)` pair. Panics if the format string has an
    /// odd length, as that indicates corrupt generated data.
    fn next(&mut self) -> Option<(u8, u8)> {
        let ty = self.inner.next()?;
        let size = self.inner.next().expect("Invalid format string data");
        Some((ty, size))
    }
}
/// ModRM.mod value: the r/m operand is a register (direct addressing).
const MOD_DIRECT: u8 = 0b11;
/// ModRM.mod value: memory operand without displacement.
const MOD_NODISP: u8 = 0b00;
/// SIB form without a base register (same bit pattern as MOD_NODISP).
const MOD_NOBASE: u8 = 0b00;
/// ModRM.mod value: memory operand with an 8-bit displacement.
const MOD_DISP8: u8 = 0b01;
/// ModRM.mod value: memory operand with a 32-bit (or 16-bit mode: 16-bit) displacement.
const MOD_DISP32: u8 = 0b10;
/// The kind of relocation the assembler runtime must resolve.
#[derive(Debug, Clone, Copy)]
enum RelocationKind {
    /// Offset relative to the end of the instruction (jumps/calls).
    Relative,
    /// Absolute address field (emitted only in protected mode in this file).
    Absolute,
    /// Reference to an external symbol (emitted only in protected mode in this file).
    Extern,
}
impl RelocationKind {
fn to_id(self) -> u8 {
match self {
RelocationKind::Relative => 0,
RelocationKind::Absolute => 1,
RelocationKind::Extern => 2
}
}
}
pub(super) fn compile_instruction(ctx: &mut Context, instruction: Instruction, args: Vec<RawArg>) -> Result<(), Option<String>> {
let mut ops = instruction.idents;
let op = ops.pop().unwrap();
let prefixes = ops;
let mut args = args.into_iter().map(clean_memoryref).collect::<Result<Vec<CleanArg>, _>>()?;
let addr_size = sanitize_indirects_and_sizes(ctx, &mut args)?;
let addr_size = addr_size.unwrap_or(match ctx.mode {
X86Mode::Long => Size::B_8,
X86Mode::Protected => Size::B_4
});
let pref_addr = match (ctx.mode, addr_size) {
(X86Mode::Long, Size::B_8) => false,
(X86Mode::Long, Size::B_4) => true,
(X86Mode::Protected, Size::B_4) => false,
(X86Mode::Protected, Size::B_2) => true,
_ => return Err(Some("Impossible address size".into()))
};
let data = match_op_format(ctx, &op, &args)?;
if !ctx.features.contains(data.features) {
return Err(Some(format!(
"This instruction uses features that are not indicated to be available: {}",
data.features - ctx.features
)));
}
let (mut pref_mod, pref_seg) = get_legacy_prefixes(data, prefixes)?;
let (op_size, args) = size_operands(data, args)?;
let mut pref_size = false;
let mut rex_w = false;
let mut vex_l = false;
if data.flags.intersects(Flags::AUTO_SIZE | Flags::AUTO_NO32 | Flags::AUTO_REXW | Flags::AUTO_VEXL) {
let op_size = op_size.expect("Bad formatting data? No wildcard sizes");
match ctx.mode {
X86Mode::Protected => if op_size == Size::B_8 {
return Err(Some(format!("'{}': Does not support 64 bit operands in 32-bit mode", op)));
},
X86Mode::Long => ()
}
if data.flags.contains(Flags::AUTO_NO32) {
match (op_size, ctx.mode) {
(Size::B_2, _) => pref_size = true,
(Size::B_8, X86Mode::Long) => (),
(Size::B_4, X86Mode::Protected) => (),
(Size::B_4, X86Mode::Long) => return Err(Some(format!("'{}': Does not support 32 bit operands in 64-bit mode", op))),
(_, _) => panic!("bad formatting data"),
}
} else if data.flags.contains(Flags::AUTO_REXW) {
if op_size == Size::B_8 {
rex_w = true;
} else if op_size != Size::B_4 {
return Err(Some(format!("'{}': Does not support 16-bit operands", op)));
}
} else if data.flags.contains(Flags::AUTO_VEXL) {
if op_size == Size::B_32 {
vex_l = true;
} else if op_size != Size::B_16 {
panic!("bad formatting data");
}
} else if op_size == Size::B_2 {
pref_size = true;
} else if op_size == Size::B_8 {
rex_w = true;
} else if op_size != Size::B_4 {
panic!("bad formatting data");
}
}
let pref_size = pref_size || data.flags.contains(Flags::WORD_SIZE);
let rex_w = rex_w || data.flags.contains(Flags::WITH_REXW);
let vex_l = vex_l || data.flags.contains(Flags::WITH_VEXL);
let pref_addr = pref_addr || data.flags.contains(Flags::PREF_67);
if data.flags.contains(Flags::PREF_F0) { pref_mod = Some(0xF0);
} else if data.flags.contains(Flags::PREF_F2) { pref_mod = Some(0xF2);
} else if data.flags.contains(Flags::PREF_F3) { pref_mod = Some(0xF3);
}
let need_rex = check_rex(ctx, data, &args, rex_w)?;
let (mut rm, reg, vvvv, ireg, mut args) = extract_args(data, args);
let mut relocations = Vec::new();
let mut ops = data.ops;
let immediate_opcode = if data.flags.intersects(Flags::IMM_OP) {
let (&imm, rest) = ops.split_last().expect("bad formatting data");
ops = rest;
Some(imm)
} else {
None
};
let buffer = &mut ctx.state.stmts;
if let Some(pref) = pref_seg {
buffer.push(Stmt::u8(pref));
}
if pref_addr {
buffer.push(Stmt::u8(0x67));
}
if data.flags.intersects(Flags::VEX_OP | Flags::XOP_OP) {
let prefix = if pref_size { 0b01
} else if pref_mod == Some(0xF3) { 0b10
} else if pref_mod == Some(0xF2) { 0b11
} else { 0
};
let (&map_sel, tail) = ops.split_first().expect("bad formatting data");
ops = tail;
compile_vex_xop(ctx.mode, buffer, data, ®, &rm, map_sel, rex_w, &vvvv, vex_l, prefix);
} else {
if let Some(pref) = pref_mod {
buffer.push(Stmt::u8(pref));
}
if pref_size {
buffer.push(Stmt::u8(0x66));
}
if need_rex {
if ctx.mode == X86Mode::Protected {
return Err(Some(format!("'{}': Does not support 64 bit operand size in 32-bit mode", op)))
}
compile_rex(buffer, rex_w, ®, &rm);
}
}
if data.flags.contains(Flags::SHORT_ARG) {
let (last, head) = ops.split_last().expect("bad formatting data");
ops = head;
buffer.push(Stmt::Extend(Vec::from(ops)));
let rm_k = if let Some(SizedArg::Direct {reg, ..}) = rm.take() {
reg.kind
} else {
panic!("bad formatting data")
};
if let RegKind::Dynamic(_, expr) = rm_k {
let last: TokenTree = Literal::u8_suffixed(*last).into();
buffer.push(Stmt::ExprUnsigned(serialize::expr_mask_shift_or(&last, &delimited(expr), 7, 0), Size::BYTE));
} else {
buffer.push(Stmt::u8(last + (rm_k.encode() & 7)));
}
} else {
buffer.push(Stmt::Extend(Vec::from(ops)));
}
if let Some(SizedArg::Direct {reg: rm, ..}) = rm {
let reg_k = if let Some(SizedArg::Direct {reg, ..}) = reg {
reg.kind
} else {
RegKind::from_number(data.reg)
};
compile_modrm_sib(buffer, MOD_DIRECT, reg_k, rm.kind);
} else if let Some(SizedArg::Indirect {disp_size, base, index, disp, ..}) = rm {
let reg_k = if let Some(SizedArg::Direct {reg, ..}) = reg {
reg.kind
} else {
RegKind::from_number(data.reg)
};
let mode_vsib = index.as_ref().map_or(false, |(i, _, _)| i.kind.family() == RegFamily::XMM);
let mode_16bit = addr_size == Size::B_2;
let mode_rip_relative = base.as_ref().map_or(false, |b| b.kind.family() == RegFamily::RIP);
let mode_rbp_base = base.as_ref().map_or(false, |b| b == &RegId::RBP || b == &RegId::R13 || b.kind.is_dynamic());
if mode_vsib {
let (index, scale, scale_expr) = index.unwrap();
let index = index.kind;
let (base, mode) = if let Some(base) = base {
(base.kind, match (&disp, disp_size) {
(&Some(_), Some(Size::BYTE)) => MOD_DISP8,
(&Some(_), _) => MOD_DISP32,
(&None, _) => MOD_DISP8
})
} else {
(RegKind::Static(RegId::RBP), MOD_NOBASE)
};
compile_modrm_sib(buffer, mode, reg_k, RegKind::Static(RegId::RSP));
if let Some(expr) = scale_expr {
compile_sib_dynscale(buffer, scale as u8, expr, index, base);
} else {
compile_modrm_sib(buffer, encode_scale(scale).unwrap(), index, base);
}
if let Some(disp) = disp {
buffer.push(Stmt::ExprSigned(delimited(disp), if mode == MOD_DISP8 {Size::BYTE} else {Size::B_4}));
} else if mode == MOD_DISP8 {
buffer.push(Stmt::u8(0));
} else {
buffer.push(Stmt::u32(0));
}
} else if mode_16bit {
let base_k = base.unwrap().kind;
let mode = match (&disp, disp_size) {
(&Some(_), Some(Size::BYTE)) => MOD_DISP8,
(&Some(_), _) => MOD_DISP32, (&None, _) => if mode_rbp_base {MOD_DISP8} else {MOD_NODISP}
};
compile_modrm_sib(buffer, mode, reg_k, base_k);
if let Some(disp) = disp {
buffer.push(Stmt::ExprSigned(delimited(disp), if mode == MOD_DISP8 {Size::BYTE} else {Size::B_2}));
} else if mode == MOD_DISP8 {
buffer.push(Stmt::u8(0));
}
} else if mode_rip_relative {
compile_modrm_sib(buffer, MOD_NODISP, reg_k, RegKind::Static(RegId::RBP));
match ctx.mode {
X86Mode::Long => if let Some(disp) = disp {
buffer.push(Stmt::ExprSigned(delimited(disp), Size::B_4));
} else {
buffer.push(Stmt::u32(0))
},
X86Mode::Protected => {
buffer.push(Stmt::u32(0));
let disp = disp.unwrap_or_else(|| serialize::reparse(&serialize::expr_zero()).expect("Invalid expression generated"));
relocations.push((Jump::new(JumpKind::Bare(disp), None), 0, Size::B_4, RelocationKind::Absolute));
},
}
} else {
let no_base = base.is_none();
let mode = if mode_rbp_base && disp.is_none() {
MOD_DISP8
} else if disp.is_none() || no_base {
MOD_NODISP
} else if let Some(Size::BYTE) = disp_size {
MOD_DISP8
} else {
MOD_DISP32
};
if let Some((index, scale, scale_expr)) = index {
let base = if let Some(base) = base {
base.kind
} else {
RegKind::Static(RegId::RBP)
};
compile_modrm_sib(buffer, mode, reg_k, RegKind::Static(RegId::RSP));
if let Some(expr) = scale_expr {
compile_sib_dynscale(buffer, scale as u8, expr, index.kind, base);
} else {
compile_modrm_sib(buffer, encode_scale(scale).unwrap(), index.kind, base);
}
} else if let Some(base) = base {
compile_modrm_sib(buffer, mode, reg_k, base.kind);
} else {
match ctx.mode {
X86Mode::Protected => {
compile_modrm_sib(buffer, mode, reg_k, RegKind::Static(RegId::RBP));
},
X86Mode::Long => {
compile_modrm_sib(buffer, mode, reg_k, RegKind::Static(RegId::RSP));
compile_modrm_sib(buffer, 0, RegKind::Static(RegId::RSP), RegKind::Static(RegId::RBP));
}
}
}
if let Some(disp) = disp {
buffer.push(Stmt::ExprSigned(delimited(disp), if mode == MOD_DISP8 {Size::BYTE} else {Size::B_4}));
} else if no_base {
buffer.push(Stmt::u32(0));
} else if mode == MOD_DISP8 {
buffer.push(Stmt::u8(0));
}
}
} else if let Some(SizedArg::IndirectJumpTarget {jump, ..}) = rm {
let reg_k = if let Some(SizedArg::Direct {reg, ..}) = reg {
reg.kind
} else {
RegKind::from_number(data.reg)
};
compile_modrm_sib(buffer, MOD_NODISP, reg_k, RegKind::Static(RegId::RBP));
buffer.push(Stmt::u32(0));
match ctx.mode {
X86Mode::Long => relocations.push((jump, 0, Size::B_4, RelocationKind::Relative)),
X86Mode::Protected => relocations.push((jump, 0, Size::B_4, RelocationKind::Absolute))
}
}
if let Some(code) = immediate_opcode {
buffer.push(Stmt::u8(code));
relocations.iter_mut().for_each(|r| r.1 += 1);
}
if let Some(SizedArg::Direct {reg: ireg, ..}) = ireg {
let ireg = ireg.kind;
let byte = ireg.encode() << 4;
let mut byte: TokenTree = Literal::u8_suffixed(byte).into();
if let RegKind::Dynamic(_, expr) = ireg {
byte = serialize::expr_mask_shift_or(&byte, &delimited(expr), 0xF, 4);
}
if !args.is_empty() {
if let SizedArg::Immediate {value, size: Size::BYTE} = args.remove(0) {
byte = serialize::expr_mask_shift_or(&byte, &delimited(value), 0xF, 0);
} else {
panic!("bad formatting data")
}
}
buffer.push(Stmt::ExprUnsigned(byte, Size::BYTE));
relocations.iter_mut().for_each(|r| r.1 += 1);
}
for arg in args {
match arg {
SizedArg::Immediate {value, size} => {
buffer.push(Stmt::ExprSigned(delimited(value), size));
relocations.iter_mut().for_each(|r| r.1 += size.in_bytes());
},
SizedArg::JumpTarget {jump, size} => {
buffer.push(Stmt::Const(0, size));
relocations.iter_mut().for_each(|r| r.1 += size.in_bytes());
if let JumpKind::Bare(_) = &jump.kind {
match ctx.mode {
X86Mode::Protected => relocations.push((jump, 0, size, RelocationKind::Extern)),
X86Mode::Long => return Err(Some("Extern relocations are not supported in x64 mode".to_string()))
}
} else {
relocations.push((jump, 0, size, RelocationKind::Relative));
}
},
_ => panic!("bad immediate data")
};
}
for (target, offset, size, kind) in relocations {
let data = [size.in_bytes(), kind.to_id()];
let data = match ctx.mode {
X86Mode::Protected => &data,
X86Mode::Long => &data[..1],
};
buffer.push(target.encode(offset + size.in_bytes(), 0, data));
}
Ok(())
}
/// Converts a raw parsed argument into a `CleanArg`, normalizing memory
/// references into (base, index, scale, displacement) form.
///
/// For plain memory references the register list is partitioned into a base
/// register and at most one scaled index; multiple uses of the same register
/// merge their scales. Type-mapped references additionally scale displacements
/// by the element size and resolve attribute offsets.
///
/// Fix note: the closures passed to `position` previously read `®` (mojibake
/// for `&reg`), which did not compile; restored to `&reg`.
fn clean_memoryref(arg: RawArg) -> Result<CleanArg, Option<String>> {
    Ok(match arg {
        RawArg::Direct {reg} => CleanArg::Direct {reg},
        RawArg::JumpTarget {jump, size} => CleanArg::JumpTarget {jump, size},
        RawArg::IndirectJumpTarget {jump, size} => {
            if let JumpKind::Bare(_) = jump.kind {
                return Err(Some("Extern indirect jumps are not supported. Use a displacement".to_string()))
            }
            CleanArg::IndirectJumpTarget {jump, size}
        },
        RawArg::Immediate {value, size} => CleanArg::Immediate {value, size},
        RawArg::Invalid => return Err(None),
        RawArg::IndirectRaw {span, value_size, nosplit, disp_size, items} => {
            // Split the memory reference item list into scaled registers,
            // unscaled registers, and displacement expressions.
            let mut scaled = Vec::new();
            let mut regs = Vec::new();
            let mut disps = Vec::new();
            for item in items {
                match item {
                    MemoryRefItem::Register(reg) => regs.push(reg),
                    MemoryRefItem::ScaledRegister(reg, value) => scaled.push((reg, value)),
                    MemoryRefItem::Displacement(expr) => disps.push(expr)
                }
            }

            // Pick a base: an unscaled register that appears exactly once and
            // is not also used scaled.
            let mut base_reg_index = None;
            for (i, reg) in regs.iter().enumerate() {
                if !(regs.iter().enumerate().any(|(j, other)| i != j && reg == other) ||
                     scaled.iter().any(|(other, _)| reg == other)) {
                    base_reg_index = Some(i);
                    break;
                }
            }
            let mut base = base_reg_index.map(|i| regs.remove(i));
            // Any remaining unscaled register counts as scale-1.
            scaled.extend(regs.into_iter().map(|r| (r, 1)));

            // Merge repeated registers by adding their scales.
            let mut joined_regs = Vec::new();
            for (reg, s) in scaled {
                if let Some(i) = joined_regs.iter().position(|(other, _)| &reg == other) {
                    joined_regs[i].1 += s;
                } else {
                    joined_regs.push((reg, s));
                }
            }

            // If no base was found, try to take a scale-1 register from the
            // merged list; what remains becomes the index.
            let index = if base.is_none() {
                base_reg_index = joined_regs.iter().position(|&(_, s)| s == 1);
                base = base_reg_index.map(|i| joined_regs.remove(i).0);
                let index = joined_regs.pop();
                if nosplit && index.is_none() && base.is_some() {
                    // nosplit: prefer [reg*1] over [reg] when only one register exists.
                    base.take().map(|reg| (reg, 1))
                } else {
                    index
                }
            } else {
                joined_regs.pop()
            };

            // More than one distinct scaled register cannot be encoded.
            if !joined_regs.is_empty() {
                emit_error!(span, "Impossible memory argument");
                return Err(None);
            }

            // Fold all displacement terms into a single expression.
            let disp = serialize::expr_add_many(span, disps.into_iter().map(delimited));
            let disp = disp.map(|d| serialize::reparse(&d).expect("Invalid expression generated internally"));

            CleanArg::Indirect {
                span,
                nosplit,
                size: value_size,
                disp_size,
                base,
                index: index.map(|(r, s)| (r, s, None)),
                disp,
            }
        },
        RawArg::TypeMappedRaw {span, base_reg, scale, value_size, nosplit, disp_size, scaled_items, attribute} => {
            let base = base_reg;

            // Split items; in a type map every plain register is an index at scale 1.
            let mut scaled = Vec::new();
            let mut disps = Vec::new();
            for item in scaled_items {
                match item {
                    MemoryRefItem::Register(reg) => scaled.push((reg, 1)),
                    MemoryRefItem::ScaledRegister(reg, scale) => scaled.push((reg, scale)),
                    MemoryRefItem::Displacement(expr) => disps.push(expr)
                }
            }

            // Merge repeated registers by adding their scales.
            let mut joined_regs = Vec::new();
            for (reg, s) in scaled {
                if let Some(i) = joined_regs.iter().position(|(other, _)| &reg == other) {
                    joined_regs[i].1 += s;
                } else {
                    joined_regs.push((reg, s));
                }
            }
            let index = joined_regs.pop();
            if !joined_regs.is_empty() {
                emit_error!(span, "Impossible memory argument");
                return Err(None);
            }

            // Displacements are scaled by the mapped type's size; the attribute
            // access adds the field offset.
            let true_disp_size = disp_size.unwrap_or(Size::B_4);
            let scaled_disp = serialize::expr_add_many(span, disps.into_iter().map(delimited));
            let scaled_disp = scaled_disp.map(|disp| serialize::expr_size_of_scale(&scale, &disp, true_disp_size));
            let attr_disp = attribute.map(|attr| serialize::expr_offset_of(&scale, &attr, true_disp_size));
            let disp = if let Some(scaled_disp) = scaled_disp {
                if let Some(attr_disp) = attr_disp {
                    serialize::expr_add_many(span, vec![attr_disp, scaled_disp].into_iter())
                } else {
                    Some(scaled_disp)
                }
            } else {
                attr_disp
            };
            let disp = disp.map(|d| serialize::reparse(&d).expect("Invalid expression generated internally"));

            // The index scale is multiplied at runtime by the mapped type's size.
            let index = index.map(|(r, s)| {
                let scale_expr = serialize::reparse(&serialize::expr_size_of(&scale)).expect("Invalid expression generated internally");
                (r, s, Some(scale_expr))
            });

            CleanArg::Indirect {
                span,
                nosplit,
                size: value_size,
                disp_size,
                base: Some(base),
                index,
                disp,
            }
        },
    })
}
/// Validates every memory reference in `args` and fills in missing
/// displacement sizes; also derives sizes for unsized immediates.
///
/// Returns the address size implied by the memory operand (`None` when no
/// operand constrains it). Emits errors via `emit_error!` for invalid
/// combinations and may return `Err` propagated from `sanitize_indirect`.
fn sanitize_indirects_and_sizes(ctx: &Context, args: &mut [CleanArg]) -> Result<Option<Size>, Option<String>> {
    let mut addr_size = None;
    // x86 encodings allow at most one memory operand per instruction.
    let mut encountered_indirect = false;
    for arg in args.iter_mut() {
        match *arg {
            CleanArg::Indirect {span, nosplit, ref mut disp_size, ref mut base, ref mut index, ref disp, ..} => {
                if encountered_indirect {
                    emit_error!(span, "Multiple memory references in a single instruction")
                }
                encountered_indirect = true;
                // Validate the base/index combination and learn the address size.
                addr_size = sanitize_indirect(ctx, span, nosplit, base, index)?;
                // The scale must fit the 2-bit SIB scale field (1/2/4/8).
                if let Some((_, scale, _)) = *index {
                    if encode_scale(scale).is_none() {
                        emit_error!(span, "Impossible scale");
                    }
                }
                if let Some(size) = *disp_size {
                    // An explicit displacement size requires an actual displacement
                    // and must be legal for the active address size.
                    if disp.is_none() {
                        emit_error!(span, "Displacement size without displacement");
                    }
                    if addr_size == Some(Size::B_2) {
                        if size != Size::BYTE && size != Size::B_2 {
                            emit_error!(span, "Invalid displacement size, only BYTE or B_2 are possible");
                        }
                    } else if size != Size::BYTE && size != Size::B_4 {
                        emit_error!(span, "Invalid displacement size, only BYTE or B_4 are possible");
                    }
                } else if let Some(ref disp) = *disp {
                    // No explicit size: derive one from the displacement literal when possible.
                    match derive_size(disp) {
                        Some(Size::BYTE) => *disp_size = Some(Size::BYTE),
                        Some(_) if addr_size == Some(Size::B_2) => *disp_size = Some(Size::B_2),
                        Some(_) => *disp_size = Some(Size::B_4),
                        None => ()
                    }
                }
            },
            CleanArg::Immediate {ref value, size: ref mut size @ None} => {
                // Unsized immediates get a size derived from their literal value.
                *size = derive_size(value);
            },
            _ => ()
        }
    }
    Ok(addr_size)
}
/// Derives the smallest operand size that can hold the value of a literal
/// expression, or `None` if the expression is not a (possibly negated,
/// possibly parenthesized) integer or byte literal.
///
/// Fix note: a negated literal whose magnitude exceeds the 32-bit range
/// previously reported `Size::B_4`; it cannot fit a 4-byte field and now
/// correctly reports `Size::B_8` (matching the `Err(_)` overflow arm).
fn derive_size(expr: &syn::Expr) -> Option<Size> {
    // Strip any levels of grouping/parentheses around the literal.
    let mut inner = expr;
    loop {
        inner = match inner {
            syn::Expr::Group(syn::ExprGroup { expr, ..}) => expr,
            syn::Expr::Paren(syn::ExprParen { expr, .. }) => expr,
            _ => break
        }
    }
    match inner {
        // Positive literal: classify by the smallest signed size that holds it.
        syn::Expr::Lit(syn::ExprLit { ref lit, .. } ) => match lit {
            syn::Lit::Byte(_) => Some(Size::BYTE),
            syn::Lit::Int(i) => match i.base10_parse::<i32>() {
                // Out of i32 range: needs a full 8-byte field.
                Err(_) => Some(Size::B_8),
                Ok(x) if x > 0x7FFF || x < -0x8000 => Some(Size::B_4),
                Ok(x) if x > 0x7F || x < -0x80 => Some(Size::B_2),
                Ok(_) => Some(Size::BYTE),
            },
            _ => None
        },
        // Negated literal: parse the (positive) inner literal and account for
        // the asymmetric two's complement ranges after negation.
        syn::Expr::Unary(syn::ExprUnary { op: syn::UnOp::Neg(_), ref expr, .. } ) => match &**expr {
            syn::Expr::Lit(syn::ExprLit { ref lit, .. } ) => match lit {
                syn::Lit::Byte(_) => Some(Size::BYTE),
                syn::Lit::Int(i) => match i.base10_parse::<i64>() {
                    Err(_) => Some(Size::B_8),
                    // Magnitude beyond 32-bit range: requires an 8-byte field.
                    Ok(x) if x > 0x8000_0000 || x < -0x7FFF_FFFF => Some(Size::B_8),
                    Ok(x) if x > 0x8000 || x < -0x7FFF => Some(Size::B_4),
                    Ok(x) if x > 0x80 || x < -0x7F => Some(Size::B_2),
                    Ok(_) => Some(Size::BYTE),
                },
                _ => None
            },
            _ => None
        },
        _ => None
    }
}
/// Validates the base/index register combination of a memory reference and
/// canonicalizes it into the form the ModRM/SIB encoder expects (VSIB swaps,
/// 16-bit register-pair encoding, RSP/index normalization, scale splitting).
///
/// Returns the address size implied by the registers, or `None` when the
/// reference contains no addressing registers (or only a VSIB index).
///
/// Fix note: 16-bit addressing was being rejected in protected mode; it is
/// long mode that lacks 16-bit addressing (see the address-size prefix table
/// and the `mode_16bit` encoder in `compile_instruction`, both of which only
/// support B_2 addresses in protected mode). The check now tests `Long`.
fn sanitize_indirect(ctx: &Context, span: Span, nosplit: bool, base: &mut Option<Register>,
                     index: &mut Option<(Register, isize, Option<syn::Expr>)>) -> Result<Option<Size>, Option<String>>
{
    // Register family and size of base and index, if present.
    let b = base.as_ref().map(|b| (b.kind.family(), b.size()));
    let i = index.as_ref().map(|i| (i.0.kind.family(), i.0.size()));

    let size;
    let family;
    let mut vsib_mode = false;

    // Determine the effective addressing family/size. Mixing families is only
    // allowed when one of the two registers is an XMM register (VSIB).
    match (&b, &i) {
        (&None, &None) => return Ok(None),
        (&Some((f, s)), &None) |
        (&None, &Some((f, s))) => {
            size = s;
            family = f;
        },
        (&Some((f1, s1)), &Some((f2, s2))) => if f1 == f2 {
            if s1 != s2 {
                emit_error!(span, "Registers of differing sizes");
                return Err(None);
            }
            size = s1;
            family = f1;
        } else if f1 == RegFamily::XMM {
            vsib_mode = true;
            size = s2;
            family = f2;
        } else if f2 == RegFamily::XMM {
            vsib_mode = true;
            size = s1;
            family = f1;
        } else {
            emit_error!(span, "Register type combination not supported");
            return Err(None);
        }
    }

    // Check that this family/size can address memory in the current mode.
    match family {
        RegFamily::RIP => if b.is_some() && i.is_some() {
            emit_error!(span, "Register type combination not supported");
            return Err(None);
        },
        RegFamily::LEGACY => match size {
            Size::B_4 => (),
            Size::B_8 => (), // long-mode only; enforced by the address-size table later
            // 16-bit addressing only exists in protected mode and has no VSIB form.
            Size::B_2 => if ctx.mode == X86Mode::Long || vsib_mode {
                emit_error!(span, "16-bit addressing is not supported in this mode");
                return Err(None);
            },
            _ => {
                emit_error!(span, "Register type not supported");
                return Err(None);
            }
        },
        RegFamily::XMM => if b.is_some() && i.is_some() {
            emit_error!(span, "Register type combination not supported");
        },
        _ => {
            emit_error!(span, "Register type not supported");
            return Err(None);
        }
    }

    // RIP-relative: a lone unscaled "index" is really the base; RIP cannot be scaled.
    if family == RegFamily::RIP {
        match index.take() {
            Some((index, 1, None)) => *base = Some(index),
            Some(_) => {
                emit_error!(span, "RIP cannot be scaled");
                return Err(None);
            },
            None => ()
        }
        return Ok(Some(size));
    }

    // Pure VSIB (only an XMM register present): it always occupies the index slot.
    if family == RegFamily::XMM {
        if let Some(reg) = base.take() {
            *index = Some((reg, 1, None));
        }
        return Ok(None);
    }

    // Mixed VSIB: the general-purpose register must be the base; swap an
    // unscaled XMM base with the index if needed.
    if vsib_mode {
        if base.as_ref().unwrap().kind.family() == RegFamily::XMM {
            if let (ref mut i, 1, None) = index.as_mut().unwrap() {
                swap(i, base.as_mut().unwrap())
            } else {
                emit_error!(span, "vsib addressing requires a general purpose register as base");
                return Err(None);
            }
        }
        return Ok(Some(size));
    }

    // 16-bit addressing: only eight fixed register combinations exist; encode
    // the pair as a pseudo register number.
    if size == Size::B_2 {
        let mut first_reg = base.take();
        let mut second_reg = match index.take() {
            Some((i, 1, None)) => Some(i),
            None => None,
            Some(_) => {
                emit_error!(span, "16-bit addressing with scaled index");
                return Err(None);
            },
        };
        if first_reg.is_none() {
            first_reg = second_reg.take();
        }
        let encoded_base = match (&first_reg, &second_reg) {
            (r1, r2) if (r1 == &RegId::RBX && r2 == &RegId::RSI) ||
                        (r1 == &RegId::RSI && r2 == &RegId::RBX) => RegId::from_number(0),
            (r1, r2) if (r1 == &RegId::RBX && r2 == &RegId::RDI) ||
                        (r1 == &RegId::RDI && r2 == &RegId::RBX) => RegId::from_number(1),
            (r1, r2) if (r1 == &RegId::RBP && r2 == &RegId::RSI) ||
                        (r1 == &RegId::RSI && r2 == &RegId::RBP) => RegId::from_number(2),
            (r1, r2) if (r1 == &RegId::RBP && r2 == &RegId::RDI) ||
                        (r1 == &RegId::RDI && r2 == &RegId::RBP) => RegId::from_number(3),
            (r, None) if r == &RegId::RSI => RegId::from_number(4),
            (r, None) if r == &RegId::RDI => RegId::from_number(5),
            (r, None) if r == &RegId::RBP => RegId::from_number(6),
            (r, None) if r == &RegId::RBX => RegId::from_number(7),
            _ => {
                emit_error!(span, "Impossible register combination");
                return Err(None);
            }
        };
        *base = Some(Register::new_static(Size::B_2, encoded_base));
        return Ok(Some(size));
    }

    // Split scales of 2/3/5/9 into base + smaller scale when no base is present
    // ([eax*2] -> [eax + eax*1]) unless the user asked for nosplit.
    if !nosplit && base.is_none() {
        if let Some((ref reg, ref mut scale, None)) = *index {
            match *scale {
                2 | 3 | 5 | 9 => {
                    *base = Some(reg.clone());
                    *scale -= 1
                },
                _ => ()
            }
        }
    }

    // RSP cannot be an index: swap it into the base slot when possible.
    if let Some((i, scale, scale_expr)) = index.take() {
        if i == RegId::RSP {
            if *base != RegId::RSP && scale == 1 && scale_expr.is_none() {
                *index = base.take().map(|reg| (reg, 1, None));
                *base = Some(i);
            } else {
                emit_error!(span, "'rsp' cannot be used as index field");
                return Err(None);
            }
        } else {
            *index = Some((i, scale, scale_expr))
        }
    }

    // RSP/R12 (or a dynamic register that might be either) as a lone base needs
    // a SIB byte; force one with an RSP "no index" entry.
    if index.is_none() && (*base == RegId::RSP || *base == RegId::R12 || base.as_ref().map_or(false, |r| r.kind.is_dynamic())) {
        *index = Some((Register::new_static(size, RegId::RSP), 1, None));
    }

    Ok(Some(size))
}
/// Looks up a mnemonic in the opcode tables and returns the first encoding
/// format whose operand pattern matches `args`.
///
/// Emits an error (and returns `Err(None)`) for unknown mnemonics; returns a
/// descriptive `Err(Some(..))` listing the valid forms when no format matches.
fn match_op_format(ctx: &Context, ident: &syn::Ident, args: &[CleanArg]) -> Result<&'static Opdata, Option<String>> {
    let name = ident.to_string();
    let name = name.as_str();

    let data = match get_mnemnonic_data(name) {
        Some(data) => data,
        None => {
            emit_error!(ident, "'{}' is not a valid instruction", name);
            return Err(None);
        }
    };

    for format in data {
        if match_format_string(ctx, format, args).is_ok() {
            return Ok(format);
        }
    }

    Err(Some(
        format!("'{}': argument type/size mismatch, expected one of the following forms:\n{}", name, format_opdata_list(name, data))
    ))
}
/// Tests whether `args` is compatible with a single encoding format.
///
/// The format string is a sequence of (type code, size code) byte pairs. Each
/// argument is first matched by type (collecting its known size, if any), and
/// then that size is checked against the format's size code.
fn match_format_string(ctx: &Context, fmt: &Opdata, args: &[CleanArg]) -> Result<(), &'static str> {
    let fmtstr = &fmt.args;
    // Some encodings only exist in 32-bit (protected) mode.
    if ctx.mode != X86Mode::Protected && fmt.flags.intersects(Flags::X86_ONLY) {
        return Err("Not available in 32-bit mode");
    }
    // Two format bytes per argument.
    if fmtstr.len() != args.len() * 2 {
        return Err("argument length mismatch");
    }
    let mut args = args.iter();
    for (code, fsize) in FormatStringIterator::new(fmtstr) {
        let arg = args.next().unwrap();
        // Type check: yields the argument's size when known (None = unsized).
        let size = match (code, arg) {
            // immediates ('i') and jump offsets ('o')
            (b'i', &CleanArg::Immediate{size, ..}) |
            (b'o', &CleanArg::Immediate{size, ..}) |
            (b'o', &CleanArg::JumpTarget{size, ..}) => size,
            // 'A'-'P': a specific legacy register (rax..r15)
            (x @ b'A' ..= b'P', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::LEGACY &&
                reg.kind.code() == Some(x - b'A') => Some(reg.size()),
            // 'Q'-'V': a specific segment register (es..gs)
            (x @ b'Q' ..= b'V', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::SEGMENT &&
                reg.kind.code() == Some(x - b'Q') => Some(reg.size()),
            // 'W': cr8, 'X': st0
            (b'W', CleanArg::Direct{reg, ..}) if
                reg.kind == RegId::CR8 => Some(reg.size()),
            (b'X', CleanArg::Direct{reg, ..}) if
                reg.kind == RegId::ST0 => Some(reg.size()),
            // register-class codes
            (b'r', CleanArg::Direct{reg, ..}) |
            (b'v', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::LEGACY ||
                reg.kind.family() == RegFamily::HIGHBYTE => Some(reg.size()),
            (b'x', CleanArg::Direct{reg, ..}) |
            (b'u', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::MMX => Some(reg.size()),
            (b'y', CleanArg::Direct{reg, ..}) |
            (b'w', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::XMM => Some(reg.size()),
            (b'f', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::FP => Some(reg.size()),
            (b's', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::SEGMENT => Some(reg.size()),
            (b'c', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::CONTROL => Some(reg.size()),
            (b'd', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::DEBUG => Some(reg.size()),
            (b'b', CleanArg::Direct{reg, ..}) if
                reg.kind.family() == RegFamily::BOUND => Some(reg.size()),
            // memory operands (non-VSIB)
            (b'm', &CleanArg::Indirect {size, ref index, ..}) |
            (b'u' ..= b'w', &CleanArg::Indirect {size, ref index, ..}) if
                index.is_none() || index.as_ref().unwrap().0.kind.family() != RegFamily::XMM => size,
            (b'm', &CleanArg::IndirectJumpTarget {size, ..}) |
            (b'u' ..= b'w', &CleanArg::IndirectJumpTarget {size, ..}) => size,
            // VSIB memory operands: 'k' = 32-bit elements, 'l' = 64-bit elements;
            // the XMM index register determines the operand size.
            (b'k', &CleanArg::Indirect {size, index: Some((ref index, _, _)), ..}) if
                (size.is_none() || size == Some(Size::B_4)) &&
                index.kind.family() == RegFamily::XMM => Some(index.size()),
            (b'l', &CleanArg::Indirect {size, index: Some((ref index, _, _)), ..}) if
                (size.is_none() || size == Some(Size::B_8)) &&
                index.kind.family() == RegFamily::XMM => Some(index.size()),
            _ => return Err("argument type mismatch")
        };
        // Size check against the format's size code ('*' = wildcard, '?' = any,
        // '!' = must be unsized).
        if let Some(size) = size {
            if !match (fsize, code) {
                // immediates may be smaller than the field they fill
                (b'w', b'i') => size <= Size::B_2,
                (b'd', b'i') => size <= Size::B_4,
                (b'q', b'i') => size <= Size::B_8,
                (b'*', b'i') => size <= Size::B_4,
                (b'b', _) => size == Size::BYTE,
                (b'w', _) => size == Size::B_2,
                (b'd', _) => size == Size::B_4,
                (b'q', _) => size == Size::B_8,
                (b'f', _) => size == Size::B_6,
                (b'p', _) => size == Size::B_10,
                (b'o', _) => size == Size::B_16,
                (b'h', _) => size == Size::B_32,
                (b't', _) => size == Size::B_64,
                // wildcard XMM-class operands: xmm or ymm width
                (b'*', b'k') |
                (b'*', b'l') |
                (b'*', b'y') |
                (b'*', b'w') => size == Size::B_16 || size == Size::B_32,
                // wildcard legacy registers: word/dword/qword
                (b'*', b'r') |
                (b'*', b'A' ..= b'P') |
                (b'*', b'v') => size == Size::B_2 || size == Size::B_4 || size == Size::B_8,
                (b'*', b'm') => true,
                (b'*', _) => panic!("Invalid size wildcard"),
                (b'?', _) => true,
                (b'!', _) => false,
                _ => panic!("invalid format string")
            } {
                return Err("argument size mismatch");
            }
        } else if fsize != b'*' && fmt.flags.contains(Flags::EXACT_SIZE) {
            // Unsized argument on an exact-size format: prefer another variant.
            return Err("alternate variant exists");
        }
    }
    Ok(())
}
/// Resolves the wildcard ('*') operand size for a matched format and converts
/// every `CleanArg` into a `SizedArg` with a definite size.
///
/// Returns the resolved wildcard operand size (if the format used one) along
/// with the sized argument list. Errors on conflicting or underdetermined sizes.
fn size_operands(fmt: &Opdata, args: Vec<CleanArg>) -> Result<(Option<Size>, Vec<SizedArg>), Option<String>> {
    // First pass: agree on a single size for all wildcard-sized operands.
    let mut has_arg = false;
    let mut op_size = None;
    let mut im_size = None;
    for (arg, (_, fsize)) in args.iter().zip(FormatStringIterator::new(fmt.args)) {
        // Only wildcard-sized operands participate in size inference.
        if fsize != b'*' {
            continue;
        }
        match *arg {
            CleanArg::Direct {ref reg, ..} => {
                has_arg = true;
                let size = reg.size();
                if op_size.map_or(false, |s| s != size,) {
                    return Err(Some("Conflicting operand sizes".to_string()));
                }
                op_size = Some(size);
            },
            CleanArg::IndirectJumpTarget {size, ..} => {
                has_arg = true;
                if let Some(size) = size {
                    if op_size.map_or(false, |s| s != size) {
                        return Err(Some("Conflicting operand sizes".to_string()));
                    }
                    op_size = Some(size);
                }
            }
            CleanArg::Indirect {mut size, ref index, ..} => {
                has_arg = true;
                // VSIB operands take their size from the XMM index register.
                if let Some((ref reg, _, _)) = *index {
                    if reg.kind.family() == RegFamily::XMM {
                        size = Some(reg.size());
                    }
                }
                if let Some(size) = size {
                    if op_size.map_or(false, |s| s != size) {
                        return Err(Some("Conflicting operand sizes".to_string()));
                    }
                    op_size = Some(size);
                }
            },
            CleanArg::Immediate {size, ..} |
            CleanArg::JumpTarget {size, ..} => {
                // At most one wildcard-sized immediate per format.
                if im_size.is_some() {
                    panic!("Bad formatting data? multiple immediates with wildcard size");
                }
                im_size = size;
            }
        }
    }

    if let Some(o) = op_size {
        // Immediates for 64-bit operations are still at most 4 bytes wide.
        let ref_im_size = if o > Size::B_4 {Size::B_4} else {o};
        if let Some(i) = im_size {
            if i > ref_im_size {
                return Err(Some("Immediate size mismatch".to_string()));
            }
        }
        im_size = Some(ref_im_size);
    } else if has_arg {
        return Err(Some("Unknown operand size".to_string()));
    }

    // Second pass: attach the concrete size dictated by the format to each arg.
    let mut new_args = Vec::new();
    for (arg, (code, fsize)) in args.into_iter().zip(FormatStringIterator::new(fmt.args)) {
        let size = match (fsize, code) {
            (b'b', _) => Size::BYTE,
            (b'w', _) => Size::B_2,
            (_, b'k') |
            (b'd', _) => Size::B_4,
            (_, b'l') |
            (b'q', _) => Size::B_8,
            (b'f', _) => Size::B_6,
            (b'p', _) => Size::B_10,
            (b'o', _) => Size::B_16,
            (b'h', _) => Size::B_32,
            (b'*', b'i') => im_size.unwrap(),
            (b'*', _) => op_size.unwrap(),
            // '!' operands are unsized; the value is never used.
            (b'!', _) => Size::BYTE,
            _ => unreachable!()
        };
        new_args.push(match arg {
            CleanArg::Direct {reg} =>
                SizedArg::Direct {reg},
            CleanArg::JumpTarget {jump, ..} =>
                SizedArg::JumpTarget {jump, size},
            CleanArg::IndirectJumpTarget {jump, ..} =>
                SizedArg::IndirectJumpTarget {jump},
            CleanArg::Immediate {value, ..} =>
                SizedArg::Immediate {value, size},
            CleanArg::Indirect {disp_size, base, index, disp, ..} =>
                SizedArg::Indirect {disp_size, base, index, disp},
        });
    }
    Ok((op_size, new_args))
}
/// Validates the user-written instruction prefixes against the format's flags
/// and returns them as `(group1, group2)` prefix bytes.
///
/// Group 1 holds lock/rep-style prefixes, group 2 holds segment overrides; at
/// most one prefix per group is allowed.
fn get_legacy_prefixes(fmt: &'static Opdata, idents: Vec<syn::Ident>) -> Result<(Option<u8>, Option<u8>), Option<String>> {
    let mut group1 = None;
    let mut group2 = None;
    for prefix in idents {
        let name = prefix.to_string();
        let (group, value) = match name.as_str() {
            "rep" => if fmt.flags.contains(Flags::REP) {
                (&mut group1, 0xF3)
            } else {
                emit_error!(prefix, "Cannot use prefix {} on this instruction", name);
                return Err(None);
            },
            "repe" |
            "repz" => if fmt.flags.contains(Flags::REPE) {
                (&mut group1, 0xF3)
            } else {
                emit_error!(prefix, "Cannot use prefix {} on this instruction", name);
                return Err(None);
            },
            // repne shares the REPE availability flag but emits 0xF2.
            "repnz" |
            "repne" => if fmt.flags.contains(Flags::REPE) {
                (&mut group1, 0xF2)
            } else {
                emit_error!(prefix, "Cannot use prefix {} on this instruction", name);
                return Err(None);
            },
            "lock" => if fmt.flags.contains(Flags::LOCK) {
                (&mut group1, 0xF0)
            } else {
                emit_error!(prefix, "Cannot use prefix {} on this instruction", name);
                return Err(None);
            },
            // segment override prefixes
            "ss" => (&mut group2, 0x36),
            "cs" => (&mut group2, 0x2E),
            "ds" => (&mut group2, 0x3E),
            "es" => (&mut group2, 0x26),
            "fs" => (&mut group2, 0x64),
            "gs" => (&mut group2, 0x65),
            _ => panic!("unimplemented prefix")
        };
        if group.is_some() {
            emit_error!(prefix, "Duplicate prefix group");
            return Err(None);
        }
        *group = Some(value);
    }
    Ok((group1, group2))
}
/// Decides whether the instruction requires a REX prefix, and rejects operand
/// combinations that both require and forbid one (high-byte registers mixed
/// with extended registers or 64-bit operand size).
///
/// In protected (32-bit) mode no REX prefix exists: `rex_w` is an error and the
/// result is always `false`.
fn check_rex(ctx: &Context, fmt: &'static Opdata, args: &[SizedArg], rex_w: bool) -> Result<bool, Option<String>> {
    if ctx.mode == X86Mode::Protected {
        return if rex_w {
            Err(Some("Does not support 64 bit operand size in 32-bit mode".to_string()))
        } else {
            Ok(false)
        };
    }

    let mut need_rex = rex_w;
    let mut forbid_rex = false;

    for (arg, (c, _)) in args.iter().zip(FormatStringIterator::new(fmt.args)) {
        // Only lowercase codes denote operands that are actually encoded.
        if !c.is_ascii_lowercase() {
            continue;
        }
        match *arg {
            SizedArg::Direct {ref reg, ..} => {
                if reg.kind.family() == RegFamily::HIGHBYTE {
                    // ah/bh/ch/dh cannot be encoded with a REX prefix present.
                    forbid_rex = true;
                } else {
                    // spl/bpl/sil/dil need REX to be distinguished from the high-byte regs.
                    let new_lowbyte = reg.size() == Size::BYTE &&
                        (reg.kind == RegId::RSP || reg.kind == RegId::RBP ||
                         reg.kind == RegId::RSI || reg.kind == RegId::RDI);
                    if reg.kind.is_extended() || new_lowbyte {
                        need_rex = true;
                    }
                }
            },
            SizedArg::Indirect {ref base, ref index, ..} => {
                // Extended base or index registers need REX.B / REX.X.
                if base.as_ref().map_or(false, |r| r.kind.is_extended()) {
                    need_rex = true;
                }
                if index.as_ref().map_or(false, |(r, _, _)| r.kind.is_extended()) {
                    need_rex = true;
                }
            },
            _ => (),
        }
    }

    if need_rex && forbid_rex {
        Err(Some("High byte register combined with extended registers or 64-bit operand size".to_string()))
    } else {
        Ok(need_rex)
    }
}
/// Distributes the sized arguments over the x86 encoding positions.
///
/// Returns `(rm, reg, vvvv, is4-register, immediates)`: the ModRM r/m operand,
/// the ModRM reg operand, the VEX.vvvv operand, the "is4" immediate-encoded
/// register, and the remaining immediates/jump targets. The assignment order
/// depends on the number of register operands, which operand is the memory
/// operand, and the ENC_MR / ENC_VM flag overrides.
fn extract_args(fmt: &'static Opdata, args: Vec<SizedArg>) -> (Option<SizedArg>, Option<SizedArg>, Option<SizedArg>, Option<SizedArg>, Vec<SizedArg>) {
    // memarg: index (within regs) of the operand that can be a memory operand.
    // regarg: index of a segment/debug/control register operand (always ModRM.reg).
    let mut memarg = None;
    let mut regarg = None;
    let mut regs = Vec::new();
    let mut immediates = Vec::new();
    for (arg, (c, _)) in args.into_iter().zip(FormatStringIterator::new(fmt.args)) {
        match c {
            // operand codes that may refer to memory
            b'm' | b'u' | b'v' | b'w' | b'k' | b'l' => if memarg.is_some() {
                panic!("multiple memory arguments in format string");
            } else {
                memarg = Some(regs.len());
                regs.push(arg)
            },
            // plain register operands
            b'f' | b'x' | b'r' | b'y' | b'b' => regs.push(arg),
            // control/debug/segment registers always occupy ModRM.reg
            b'c' | b'd' | b's' => if regarg.is_some() {
                panic!("multiple segment, debug or control registers in format string");
            } else {
                regarg = Some(regs.len());
                regs.push(arg)
            },
            b'i' | b'o' => immediates.push(arg),
            _ => () }
    }
    let len = regs.len();
    if len > 4 {
        panic!("too many arguments");
    }
    let mut regs = regs.drain(..).fuse();
    let mut m = None;
    let mut r = None;
    let mut v = None;
    let mut i = None;
    if let Some(i) = regarg {
        // Special register goes in reg; the other operand is r/m.
        if i == 0 {
            r = regs.next();
            m = regs.next();
        } else {
            m = regs.next();
            r = regs.next();
        }
    } else if len == 1 {
        m = regs.next();
    } else if len == 2 {
        // Default order is reg, r/m; ENC_MR (or memory-first) flips it, ENC_VM
        // routes the first operand to vvvv.
        if fmt.flags.contains(Flags::ENC_MR) || memarg == Some(0) {
            m = regs.next();
            r = regs.next();
        } else if fmt.flags.contains(Flags::ENC_VM) {
            v = regs.next();
            m = regs.next();
        } else {
            r = regs.next();
            m = regs.next();
        }
    } else if len == 3 {
        if fmt.flags.contains(Flags::ENC_MR) || memarg == Some(1) {
            r = regs.next();
            m = regs.next();
            v = regs.next();
        } else if fmt.flags.contains(Flags::ENC_VM) || memarg == Some(0) {
            m = regs.next();
            v = regs.next();
            r = regs.next();
        } else {
            r = regs.next();
            v = regs.next();
            m = regs.next();
        }
    } else if len == 4 {
        // Four operands: the last register is the is4 operand unless the memory
        // operand sits in third position.
        if fmt.flags.contains(Flags::ENC_MR) || memarg == Some(2) {
            r = regs.next();
            v = regs.next();
            m = regs.next();
            i = regs.next();
        } else {
            r = regs.next();
            v = regs.next();
            i = regs.next();
            m = regs.next();
        }
    }
    (m, r, v, i, immediates)
}
/// Encodes a SIB scale factor (1, 2, 4 or 8) into its 2-bit field value.
/// Returns `None` for any scale that cannot be represented.
fn encode_scale(scale: isize) -> Option<u8> {
    if let 1 | 2 | 4 | 8 = scale {
        // the field value is simply log2 of the scale
        Some(scale.trailing_zeros() as u8)
    } else {
        None
    }
}
/// Emits a REX prefix byte (0100WRXB) into `buffer`, deriving the R bit from
/// `reg` and the X/B bits from `rm`. Dynamic register choices are folded in
/// as a runtime expression instead of a constant byte.
fn compile_rex(buffer: &mut Vec<Stmt>, rex_w: bool, reg: &Option<SizedArg>, rm: &Option<SizedArg>) {
    // absent operands encode as register 0, contributing no extension bits
    let mut reg_k = RegKind::from_number(0);
    let mut index_k = RegKind::from_number(0);
    let mut base_k = RegKind::from_number(0);
    if let Some(SizedArg::Direct {ref reg, ..}) = *reg {
        reg_k = reg.kind.clone();
    }
    match *rm {
        Some(SizedArg::Direct {ref reg, ..}) => base_k = reg.kind.clone(),
        Some(SizedArg::Indirect {ref base, ref index, ..}) => {
            if let Some(ref base) = *base {
                base_k = base.kind.clone();
            }
            if let Some((ref index, _, _)) = *index {
                index_k = index.kind.clone();
            }
        },
        _ => (),
    }
    // bit 4 of each register number becomes the R, X or B bit respectively
    let rex = 0x40 | (rex_w as u8) << 3 |
        (reg_k.encode() & 8) >> 1 |
        (index_k.encode() & 8) >> 2 |
        (base_k.encode() & 8) >> 3;
    if reg_k.is_dynamic() || index_k.is_dynamic() || base_k.is_dynamic() {
        // OR the dynamic register numbers' high bits in at runtime
        let mut rex: TokenTree = Literal::u8_suffixed(rex).into();
        if let RegKind::Dynamic(_, expr) = reg_k {
            rex = serialize::expr_mask_shift_or(&rex, &delimited(expr), 8, -1);
        }
        if let RegKind::Dynamic(_, expr) = index_k {
            rex = serialize::expr_mask_shift_or(&rex, &delimited(expr), 8, -2);
        }
        if let RegKind::Dynamic(_, expr) = base_k {
            rex = serialize::expr_mask_shift_or(&rex, &delimited(expr), 8, -3);
        }
        buffer.push(Stmt::ExprUnsigned(rex, Size::BYTE));
    } else {
        buffer.push(Stmt::u8(rex));
    }
}
/// Emits a VEX or XOP prefix (two or three bytes) into `buffer`.
///
/// `map_sel` selects the opcode map, `prefix` the implied legacy prefix bits
/// (pp), `rex_w` the W bit, `vex_l` the L bit and `vvvv` the extra register
/// operand carried in the prefix. `reg`/`rm` contribute the (inverted)
/// R/X/B extension bits. Dynamic register choices are folded in as runtime
/// expressions instead of constant bytes.
fn compile_vex_xop(mode: X86Mode, buffer: &mut Vec<Stmt>, data: &'static Opdata, reg: &Option<SizedArg>,
rm: &Option<SizedArg>, map_sel: u8, rex_w: bool, vvvv: &Option<SizedArg>, vex_l: bool, prefix: u8) {
    // absent operands encode as register 0, contributing no extension bits
    let mut reg_k = RegKind::from_number(0);
    let mut index_k = RegKind::from_number(0);
    let mut base_k = RegKind::from_number(0);
    let mut vvvv_k = RegKind::from_number(0);
    let byte1 = match mode {
        X86Mode::Long => {
            if let Some(SizedArg::Direct {ref reg, ..}) = *reg {
                reg_k = reg.kind.clone();
            }
            if let Some(SizedArg::Direct {ref reg, ..}) = *rm {
                base_k = reg.kind.clone();
            }
            if let Some(SizedArg::Indirect {ref base, ref index, ..}) = *rm {
                if let Some(ref base) = *base {
                    base_k = base.kind.clone();
                }
                if let Some((ref index, _, _)) = *index {
                    index_k = index.kind.clone();
                }
            }
            // byte 1 layout: R'.X'.B'.map_sel where R/X/B are stored inverted
            (map_sel & 0x1F) |
            (!reg_k.encode() & 8) << 4 |
            (!index_k.encode() & 8) << 3 |
            (!base_k.encode() & 8) << 2
        },
        X86Mode::Protected => {
            // 32-bit mode has no extended registers, so R'/X'/B' are all 1
            (map_sel & 0x1f) | 0xE0
        }
    };
    if let Some(SizedArg::Direct {ref reg, ..}) = *vvvv {
        vvvv_k = reg.kind.clone();
    }
    // byte 2 layout: W.vvvv'.L.pp (vvvv is stored inverted)
    let byte2 = (prefix & 0x3) |
    (rex_w as u8) << 7 |
    (!vvvv_k.encode() & 0xF) << 3 |
    (vex_l as u8) << 2 ;
    // The compact two-byte VEX form (0xC5) is usable when: this is VEX (not
    // XOP), X' and B' are set with map_sel == 1 (the 0F map) — that is what
    // byte1 & 0x7F == 0x61 checks — W is clear, and neither X nor B depend on
    // a dynamic register (a dynamic R is still fine, it fits in byte 1).
    if data.flags.contains(Flags::VEX_OP) && (byte1 & 0x7F) == 0x61 && (byte2 & 0x80) == 0 &&
    ((!index_k.is_dynamic() && !base_k.is_dynamic()) || mode == X86Mode::Protected) {
        buffer.push(Stmt::u8(0xC5));
        // combine the R' bit from byte 1 with the vvvv'/L/pp bits of byte 2
        let byte1 = (byte1 & 0x80) | (byte2 & 0x7F);
        if !reg_k.is_dynamic() && !vvvv_k.is_dynamic() {
            buffer.push(Stmt::u8(byte1));
            return;
        }
        // fold the dynamic (inverted) register bits in at runtime
        let mut byte1: TokenTree = Literal::u8_suffixed(byte1).into();
        if let RegKind::Dynamic(_, expr) = reg_k {
            byte1 = serialize::expr_mask_shift_inverted_and(&byte1, &delimited(expr), 8, 4)
        }
        if let RegKind::Dynamic(_, expr) = vvvv_k {
            byte1 = serialize::expr_mask_shift_inverted_and(&byte1, &delimited(expr), 0xF, 3)
        }
        buffer.push(Stmt::ExprUnsigned(byte1, Size::BYTE));
        return;
    }
    // three-byte form: 0xC4 escape for VEX, 0x8F for XOP
    buffer.push(Stmt::u8(if data.flags.contains(Flags::VEX_OP) {0xC4} else {0x8F}));
    if mode == X86Mode::Long && (reg_k.is_dynamic() || index_k.is_dynamic() || base_k.is_dynamic()) {
        let mut byte1: TokenTree = Literal::u8_suffixed(byte1).into();
        if let RegKind::Dynamic(_, expr) = reg_k {
            byte1 = serialize::expr_mask_shift_inverted_and(&byte1, &delimited(expr), 8, 4);
        }
        if let RegKind::Dynamic(_, expr) = index_k {
            byte1 = serialize::expr_mask_shift_inverted_and(&byte1, &delimited(expr), 8, 3);
        }
        if let RegKind::Dynamic(_, expr) = base_k {
            byte1 = serialize::expr_mask_shift_inverted_and(&byte1, &delimited(expr), 8, 2);
        }
        buffer.push(Stmt::ExprUnsigned(byte1, Size::BYTE));
    } else {
        buffer.push(Stmt::u8(byte1));
    }
    if vvvv_k.is_dynamic() {
        let mut byte2: TokenTree = Literal::u8_suffixed(byte2).into();
        if let RegKind::Dynamic(_, expr) = vvvv_k {
            byte2 = serialize::expr_mask_shift_inverted_and(&byte2, &delimited(expr), 0xF, 3)
        }
        buffer.push(Stmt::ExprUnsigned(byte2, Size::BYTE));
    } else {
        buffer.push(Stmt::u8(byte2));
    }
}
/// Emits a ModRM or SIB byte into `buffer`: `mode` occupies the top two bits
/// (mod, or scale for a SIB byte), `reg1` bits 3-5 (reg/index) and `reg2`
/// bits 0-2 (rm/base). Dynamic register choices are folded in as a runtime
/// expression instead of a constant byte.
fn compile_modrm_sib(buffer: &mut Vec<Stmt>, mode: u8, reg1: RegKind, reg2: RegKind) {
    let byte = mode << 6 | (reg1.encode() & 7) << 3 | (reg2.encode() & 7);
    if reg1.is_dynamic() || reg2.is_dynamic() {
        // OR the low three bits of each dynamic register number in at runtime
        let mut tokens: TokenTree = Literal::u8_suffixed(byte).into();
        if let RegKind::Dynamic(_, expr) = reg1 {
            tokens = serialize::expr_mask_shift_or(&tokens, &delimited(expr), 7, 3);
        }
        if let RegKind::Dynamic(_, expr) = reg2 {
            tokens = serialize::expr_mask_shift_or(&tokens, &delimited(expr), 7, 0);
        }
        buffer.push(Stmt::ExprUnsigned(tokens, Size::BYTE));
    } else {
        buffer.push(Stmt::u8(byte));
    }
}
/// Emits a SIB byte whose scale factor is only known at runtime: the emitted
/// code evaluates `scale_expr * scale`, which must come out as 1, 2, 4 or 8,
/// and encodes it into the top two bits of the byte.
fn compile_sib_dynscale(buffer: &mut Vec<Stmt>, scale: u8, scale_expr: syn::Expr, reg1: RegKind, reg2: RegKind) {
    // index (bits 3-5) and base (bits 0-2) fields; the scale field is filled
    // in by the generated expression below
    let byte = (reg1.encode() & 7) << 3 |
    (reg2.encode() & 7) ;
    let mut byte: TokenTree = Literal::u8_suffixed(byte).into();
    let scale: TokenTree = Literal::u8_unsuffixed(scale).into();
    // OR the low three bits of each dynamic register number in at runtime
    if let RegKind::Dynamic(_, expr) = reg1 {
        byte = serialize::expr_mask_shift_or(&byte, &delimited(expr), 7, 3);
    }
    if let RegKind::Dynamic(_, expr) = reg2 {
        byte = serialize::expr_mask_shift_or(&byte, &delimited(expr), 7, 0);
    }
    // span the generated code on the user's scale expression so errors and
    // the panic point at the right location
    let span = scale_expr.span();
    let scale_expr = delimited(scale_expr);
    // generated at runtime: log2-encode the combined scale into bits 6-7,
    // panicking if it is not a representable power of two
    let expr = delimited(quote_spanned!{ span=>
        ((
            match #scale_expr * #scale {
                8 => 3,
                4 => 2,
                2 => 1,
                1 => 0,
                _ => panic!("Type size not representable as scale")
            }
        & 3) << 6) | #byte
    });
    buffer.push(Stmt::ExprUnsigned(expr, Size::BYTE));
}