use std::io;
use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock, RwLockWriteGuard};
use std::mem;
use fnv::FnvHashMap;
use crate::{DynamicLabel, AssemblyOffset, DynasmError, LabelKind, DynasmLabelApi};
use crate::mmap::{ExecutableBuffer, MutableBuffer};
use crate::relocations::{Relocation, RelocationKind, RelocationSize, ImpossibleRelocation};
use crate::cache_control;
/// Identifier of a position in assembled code: either a named global label
/// (version 0) or one specific generation of a named local label (version >= 1).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticLabel {
    name: &'static str,
    version: usize,
}

impl StaticLabel {
    /// The id of the global label `name`. Globals always carry version 0.
    pub fn global(name: &'static str) -> StaticLabel {
        StaticLabel { name, version: 0 }
    }

    /// The id of generation `version` of the local label `name`.
    pub fn local(name: &'static str, version: usize) -> StaticLabel {
        StaticLabel { name, version }
    }

    /// Returns true if this id refers to a global label.
    pub fn is_global(&self) -> bool {
        self.version == 0
    }

    /// Returns true if this id refers to a generation of a local label.
    pub fn is_local(&self) -> bool {
        self.version != 0
    }

    /// The id of the following generation of this label. Global labels have
    /// no generations, so they are returned unchanged.
    pub fn next(mut self) -> StaticLabel {
        if self.is_local() {
            self.version += 1;
        }
        self
    }

    /// The id of the first generation of the local label `name`.
    pub fn first(name: &'static str) -> StaticLabel {
        StaticLabel { name, version: 1 }
    }

    /// The name of this label.
    pub fn get_name(&self) -> &'static str {
        self.name
    }
}
/// Owns the executable memory an assembler emits into, and hands out shared
/// read access to it while allowing the buffer to be grown and re-patched.
#[derive(Debug)]
pub struct MemoryManager {
// the executable buffer itself; shared so handles from `reader()` stay
// valid while `commit` grows or rewrites the buffer under the write lock
execbuffer: Arc<RwLock<ExecutableBuffer>>,
// current capacity of the backing allocation, in bytes
execbuffer_size: usize,
// number of bytes committed so far (reported by `committed()`)
asmoffset: usize,
// cached base address of the buffer (reported by `execbuffer_addr()`)
execbuffer_addr: usize
}
impl MemoryManager {
/// Create a new memory manager backed by a fresh executable buffer of
/// `initial_mmap_size` bytes. Fails if the buffer cannot be allocated.
pub fn new(initial_mmap_size: usize) -> io::Result<Self> {
let execbuffer = ExecutableBuffer::new(initial_mmap_size)?;
let execbuffer_addr = execbuffer.as_ptr() as usize;
Ok(MemoryManager {
execbuffer: Arc::new(RwLock::new(execbuffer)),
execbuffer_size: initial_mmap_size,
asmoffset: 0,
execbuffer_addr
})
}
/// The amount of bytes committed to the buffer so far.
pub fn committed(&self) -> usize {
self.asmoffset
}
/// The base address of the executable buffer.
pub fn execbuffer_addr(&self) -> usize {
self.execbuffer_addr
}
/// Append the bytes in `new` to the committed code, growing the backing
/// buffer when needed. When the buffer is reallocated, `f` is invoked with
/// the still-writable new buffer plus the old and new base addresses so the
/// caller can fix up position-dependent values. Clears `new` on success.
pub fn commit<F>(&mut self, new: &mut Vec<u8>, f: F) where F: FnOnce(&mut [u8], usize, usize) {
let old_asmoffset = self.asmoffset;
let new_asmoffset = self.asmoffset + new.len();
// nothing to commit
if old_asmoffset >= new_asmoffset {
return;
}
if new_asmoffset > self.execbuffer_size {
// double the capacity until the new contents fit
// NOTE(review): this loop would not terminate if execbuffer_size were 0;
// presumably `new()` is always called with a non-zero size — confirm
while self.execbuffer_size <= new_asmoffset {
self.execbuffer_size *= 2;
}
// allocate a fresh writable buffer and copy old + new contents into it
let mut new_buffer = MutableBuffer::new(self.execbuffer_size).expect("Could not allocate a larger buffer");
new_buffer.set_len(new_asmoffset);
new_buffer[.. old_asmoffset].copy_from_slice(&self.execbuffer.read().unwrap());
new_buffer[old_asmoffset..].copy_from_slice(&new);
let new_buffer_addr = new_buffer.as_ptr() as usize;
// let the caller re-patch position-dependent code for the new base address
f(&mut new_buffer, self.execbuffer_addr, new_buffer_addr);
// make sure the instruction cache sees the freshly written code
cache_control::synchronize_icache(&new_buffer);
// swap the new buffer in under the write lock
self.execbuffer_addr = new_buffer_addr;
*self.execbuffer.write().unwrap() = new_buffer.make_exec().expect("Could not swap buffer protection modes")
} else {
// the new code fits: temporarily make the existing buffer writable in place
let mut lock = self.write();
let buffer = mem::replace(&mut *lock, ExecutableBuffer::default());
let mut buffer = buffer.make_mut().expect("Could not swap buffer protection modes");
buffer.set_len(new_asmoffset);
buffer[old_asmoffset..].copy_from_slice(&new);
// only the newly written range needs an icache flush here
cache_control::synchronize_icache(&buffer[old_asmoffset .. ]);
let buffer = buffer.make_exec().expect("Could not swap buffer protection modes");
*lock = buffer;
}
new.clear();
self.asmoffset = new_asmoffset;
}
/// Acquire a write lock on the executable buffer.
pub fn write(&self) -> RwLockWriteGuard<ExecutableBuffer> {
self.execbuffer.write().unwrap()
}
/// Consume the manager and return the executable buffer, if no other
/// handles (from `reader()`) are still alive; otherwise return `self`
/// unchanged so the caller can retry later.
pub fn finalize(self) -> Result<ExecutableBuffer, Self> {
match Arc::try_unwrap(self.execbuffer) {
Ok(execbuffer) => Ok(execbuffer.into_inner().unwrap()),
Err(arc) => Err(Self {
execbuffer: arc,
..self
})
}
}
/// Get a new shared handle to the executable buffer.
pub fn reader(&self) -> Arc<RwLock<ExecutableBuffer>> {
self.execbuffer.clone()
}
}
/// Label storage for an assembler: records where global, local and dynamic
/// labels have been defined.
#[derive(Debug, Clone, Default)]
pub struct LabelRegistry {
// global labels (version 0) and every generation of each local label
static_labels: FnvHashMap<StaticLabel, AssemblyOffset>,
// dynamic label slots indexed by DynamicLabel id; None = allocated but
// not yet defined
dynamic_labels: Vec<Option<AssemblyOffset>>,
// the most recent generation number of each local label name
local_versions: FnvHashMap<&'static str, usize>,
}
impl LabelRegistry {
pub fn new() -> LabelRegistry {
LabelRegistry {
static_labels: FnvHashMap::default(),
dynamic_labels: Vec::new(),
local_versions: FnvHashMap::default(),
}
}
pub fn with_capacity(locals: usize, globals: usize, dynamics: usize) -> LabelRegistry {
LabelRegistry {
static_labels: FnvHashMap::with_capacity_and_hasher(locals + globals, Default::default()),
dynamic_labels: Vec::with_capacity(dynamics),
local_versions: FnvHashMap::with_capacity_and_hasher(locals, Default::default()),
}
}
pub fn clear(&mut self) {
self.static_labels.clear();
self.dynamic_labels.clear();
self.local_versions.clear();
}
pub fn new_dynamic_label(&mut self) -> DynamicLabel {
let id = self.dynamic_labels.len();
self.dynamic_labels.push(None);
DynamicLabel(id)
}
pub fn define_dynamic(&mut self, id: DynamicLabel, offset: AssemblyOffset) -> Result<(), DynasmError> {
match self.dynamic_labels.get_mut(id.0) {
Some(Some(_)) => return Err(DynasmError::DuplicateLabel(LabelKind::Dynamic(id))),
Some(e) => *e = Some(offset),
None => return Err(DynasmError::UnknownLabel(LabelKind::Dynamic(id))),
}
Ok(())
}
pub fn define_global(&mut self, name: &'static str, offset: AssemblyOffset) -> Result<(), DynasmError> {
match self.static_labels.entry(StaticLabel::global(name)) {
Entry::Occupied(_) => Err(DynasmError::DuplicateLabel(LabelKind::Global(name))),
Entry::Vacant(v) => {
v.insert(offset);
Ok(())
}
}
}
pub fn define_local(&mut self, name: &'static str, offset: AssemblyOffset) {
let generation = match self.local_versions.entry(name) {
Entry::Occupied(mut o) => {
*o.get_mut() += 1;
*o.get()
},
Entry::Vacant(v) => {
v.insert(1);
1
}
};
self.static_labels.insert(StaticLabel::local(name, generation), offset);
}
pub fn place_local_reference(&self, name: &'static str) -> Option<StaticLabel> {
self.local_versions.get(name).map(|&version| StaticLabel::local(name, version))
}
pub fn resolve_dynamic(&self, id: DynamicLabel) -> Result<AssemblyOffset, DynasmError> {
self.dynamic_labels.get(id.0).and_then(|&e| e).ok_or_else(|| DynasmError::UnknownLabel(LabelKind::Dynamic(id)))
}
pub fn resolve_static(&self, label: &StaticLabel) -> Result<AssemblyOffset, DynasmError> {
self.static_labels.get(label).cloned().ok_or_else(|| DynasmError::UnknownLabel(
if label.is_global() {
LabelKind::Global(label.name)
} else {
LabelKind::Local(label.name)
}
))
}
}
/// Description of a location in assembled code that holds a relocation field
/// needing to be patched once its target is known.
#[derive(Clone, Debug)]
pub struct PatchLoc<R: Relocation> {
// the assembly offset the field is located relative to; the field itself
// starts `field_offset` bytes before this offset (see `range()`)
pub location: AssemblyOffset,
// distance of the start of the patched field before `location`, in bytes
pub field_offset: u8,
// distance of the reference point used for relative addressing before
// `location`, in bytes (see `value()`)
pub ref_offset: u8,
// the relocation describing the size and kind of the patched field
pub relocation: R,
// constant added to the resolved value before it is written
pub target_offset: isize,
}
impl<R: Relocation> PatchLoc<R> {
    /// Create a new patch location description.
    ///
    /// `location` is the assembly offset the field is positioned relative to,
    /// `field_offset` is how many bytes before `location` the field starts,
    /// `ref_offset` is how many bytes before `location` the reference point
    /// for relative addressing sits, and `target_offset` is a constant added
    /// to the resolved value.
    pub fn new(location: AssemblyOffset, target_offset: isize, field_offset: u8, ref_offset: u8, relocation: R) -> PatchLoc<R> {
        PatchLoc {
            location,
            field_offset,
            ref_offset,
            relocation,
            target_offset
        }
    }

    /// The byte range occupied by this relocation's field within a buffer
    /// whose first byte sits at assembly offset `buf_offset`.
    /// Assumes the field lies at or after `buf_offset` (callers uphold this;
    /// the subtraction would panic in debug builds otherwise).
    pub fn range(&self, buf_offset: usize) -> std::ops::Range<usize> {
        let field_offset = self.location.0 - buf_offset - self.field_offset as usize;
        field_offset .. field_offset + self.relocation.size()
    }

    /// Compute the value to store in the patched field for target `target`,
    /// with the buffer mapped at address `buf_addr`.
    pub fn value(&self, target: usize, buf_addr: usize) -> isize {
        (match self.relocation.kind() {
            // buffer-relative reference to a buffer-relative target:
            // distance from the reference point to the target
            RelocationKind::Relative => target.wrapping_sub(self.location.0 - self.ref_offset as usize),
            // relative reference to an absolute target: `target` is an
            // absolute address here, so subtract the reference point's
            // absolute address (buffer base + its offset)
            RelocationKind::RelToAbs => target.wrapping_sub(self.location.0 - self.ref_offset as usize + buf_addr),
            // absolute reference to a buffer-relative target: the target's
            // absolute address. wrapping_add for consistency with the other
            // arms — plain `+` would panic on overflow in debug builds even
            // though the wrapped result is exactly what must be stored
            RelocationKind::AbsToRel => target.wrapping_add(buf_addr)
        }) as isize + self.target_offset
    }

    /// Patch this relocation's field in `buffer` (mapped at `buf_addr`) to
    /// reference `target`. Errors if the value does not fit the field.
    pub fn patch(&self, buffer: &mut [u8], buf_addr: usize, target: usize) -> Result<(), ImpossibleRelocation> {
        let value = self.value(target, buf_addr);
        self.relocation.write_value(buffer, value)
    }

    /// Adjust an already-patched field after the buffer moved by `adjustment`
    /// bytes in memory. Errors if the adjusted value does not fit the field.
    pub fn adjust(&self, buffer: &mut [u8], adjustment: isize) -> Result<(), ImpossibleRelocation> {
        let value = self.relocation.read_value(buffer);
        let value = match self.relocation.kind() {
            // relative-to-relative fields don't care where the buffer sits
            RelocationKind::Relative => value,
            RelocationKind::RelToAbs => value.wrapping_sub(adjustment),
            RelocationKind::AbsToRel => value.wrapping_add(adjustment),
        };
        self.relocation.write_value(buffer, value)
    }

    /// Returns true if this relocation must be re-adjusted (via `adjust`)
    /// whenever the buffer is moved in memory.
    pub fn needs_adjustment(&self) -> bool {
        match self.relocation.kind() {
            RelocationKind::Relative => false,
            RelocationKind::RelToAbs
            | RelocationKind::AbsToRel => true,
        }
    }
}
/// Registry of relocations that still await the definition of the label they
/// reference.
#[derive(Debug, Default)]
pub struct RelocRegistry<R: Relocation> {
// relocations waiting on a static (global or local) label
static_targets: Vec<(PatchLoc<R>, StaticLabel)>,
// relocations waiting on a dynamic label
dynamic_targets: Vec<(PatchLoc<R>, DynamicLabel)>,
}
impl<R: Relocation> RelocRegistry<R> {
    /// Create a new, empty relocation registry.
    pub fn new() -> RelocRegistry<R> {
        Self {
            dynamic_targets: Vec::new(),
            static_targets: Vec::new(),
        }
    }

    /// Create a relocation registry with room reserved for the given number
    /// of references of each kind.
    pub fn with_capacity(static_references: usize, dynamic_references: usize) -> RelocRegistry<R> {
        Self {
            dynamic_targets: Vec::with_capacity(dynamic_references),
            static_targets: Vec::with_capacity(static_references),
        }
    }

    /// Record a relocation referencing the static label `label`.
    pub fn add_static(&mut self, label: StaticLabel, patchloc: PatchLoc<R>) {
        self.static_targets.push((patchloc, label));
    }

    /// Record a relocation referencing the dynamic label `id`.
    pub fn add_dynamic(&mut self, id: DynamicLabel, patchloc: PatchLoc<R>) {
        self.dynamic_targets.push((patchloc, id))
    }

    /// Drain all recorded static-label relocations, leaving none behind.
    pub fn take_statics<'a>(&'a mut self) -> impl Iterator<Item=(PatchLoc<R>, StaticLabel)> + 'a {
        self.static_targets.drain(..)
    }

    /// Drain all recorded dynamic-label relocations, leaving none behind.
    pub fn take_dynamics<'a>(&'a mut self) -> impl Iterator<Item=(PatchLoc<R>, DynamicLabel)> + 'a {
        self.dynamic_targets.drain(..)
    }
}
/// The set of relocations that have to be re-adjusted whenever the executable
/// buffer moves in memory, keyed by the start offset of each patched field.
#[derive(Debug, Default)]
pub struct ManagedRelocs<R: Relocation> {
managed: BTreeMap<usize, PatchLoc<R>>
}
impl<R: Relocation> ManagedRelocs<R> {
    /// Create a new, empty set of managed relocations.
    pub fn new() -> Self {
        Self { managed: BTreeMap::new() }
    }

    /// Start managing `patchloc`, keyed by the start offset of its field.
    pub fn add(&mut self, patchloc: PatchLoc<R>) {
        let key = patchloc.location.0 - patchloc.field_offset as usize;
        self.managed.insert(key, patchloc);
    }

    /// Move every relocation managed by `other` into this collection.
    pub fn append(&mut self, other: &mut ManagedRelocs<R>) {
        self.managed.append(&mut other.managed);
    }

    /// Stop managing every relocation whose field starts in `start .. end`.
    pub fn remove_between(&mut self, start: usize, end: usize) {
        if start == end {
            return;
        }
        // collect first: BTreeMap cannot be mutated while ranging over it
        let doomed: Vec<usize> = self.managed.range(start .. end).map(|(&key, _)| key).collect();
        for key in doomed {
            self.managed.remove(&key);
        }
    }

    /// Iterate over all managed relocations, ordered by field start offset.
    pub fn iter<'a>(&'a self) -> impl Iterator<Item=&'a PatchLoc<R>> + 'a {
        self.managed.values()
    }
}
/// One entry in a literal pool: literal data, a label reference that will be
/// emitted as a relocation of the given size, or alignment padding.
#[derive(Clone, Debug)]
enum LitPoolEntry {
U8(u8),
U16(u16),
U32(u32),
U64(u64),
// a reference to a dynamic label
Dynamic(RelocationSize, DynamicLabel),
// a reference to a global label
Global(RelocationSize, &'static str),
// a forward reference to a local label
Forward(RelocationSize, &'static str),
// a backward reference to a local label
Backward(RelocationSize, &'static str),
// pad with the given byte value until the offset is a multiple of the
// given alignment
Align(u8, usize),
}
/// A pool of literals and label references to be emitted as one block of
/// data, with each entry aligned to its own size.
#[derive(Clone, Debug, Default)]
pub struct LitPool {
// current size of the pool in bytes, including alignment padding
offset: usize,
// pool entries in insertion order
entries: Vec<LitPoolEntry>,
}
impl LitPool {
pub fn new() -> Self {
LitPool {
offset: 0,
entries: Vec::new(),
}
}
fn bump_offset(&mut self, size: RelocationSize) -> isize {
self.align(size as usize, 0);
let offset = self.offset;
self.offset += size as usize;
offset as isize
}
pub fn align(&mut self, size: usize, with: u8) {
let misalign = self.offset % (size as usize);
if misalign == 0 {
return;
}
self.entries.push(LitPoolEntry::Align(with, size));
self.offset += size as usize - misalign;
}
pub fn push_u8(&mut self, value: u8) -> isize {
let offset = self.bump_offset(RelocationSize::Byte);
self.entries.push(LitPoolEntry::U8(value));
offset
}
pub fn push_u16(&mut self, value: u16) -> isize {
let offset = self.bump_offset(RelocationSize::Word);
self.entries.push(LitPoolEntry::U16(value));
offset
}
pub fn push_u32(&mut self, value: u32) -> isize {
let offset = self.bump_offset(RelocationSize::DWord);
self.entries.push(LitPoolEntry::U32(value));
offset
}
pub fn push_u64(&mut self, value: u64) -> isize {
let offset = self.bump_offset(RelocationSize::QWord);
self.entries.push(LitPoolEntry::U64(value));
offset
}
pub fn push_dynamic(&mut self, id: DynamicLabel, size: RelocationSize) -> isize {
let offset = self.bump_offset(size);
self.entries.push(LitPoolEntry::Dynamic(size, id));
offset
}
pub fn push_global(&mut self, name: &'static str, size: RelocationSize) -> isize {
let offset = self.bump_offset(size);
self.entries.push(LitPoolEntry::Global(size, name));
offset
}
pub fn push_forward(&mut self, name: &'static str, size: RelocationSize) -> isize {
let offset = self.bump_offset(size);
self.entries.push(LitPoolEntry::Forward(size, name));
offset
}
pub fn push_backward(&mut self, name: &'static str, size: RelocationSize) -> isize {
let offset = self.bump_offset(size);
self.entries.push(LitPoolEntry::Backward(size, name));
offset
}
fn pad_sized<D: DynasmLabelApi>(size: RelocationSize, assembler: &mut D) {
match size {
RelocationSize::Byte => assembler.push(0),
RelocationSize::Word => assembler.push_u16(0),
RelocationSize::DWord => assembler.push_u32(0),
RelocationSize::QWord => assembler.push_u64(0),
}
}
pub fn emit<D: DynasmLabelApi>(self, assembler: &mut D) {
for entry in self.entries {
match entry {
LitPoolEntry::U8(value) => assembler.push(value),
LitPoolEntry::U16(value) => assembler.push_u16(value),
LitPoolEntry::U32(value) => assembler.push_u32(value),
LitPoolEntry::U64(value) => assembler.push_u64(value),
LitPoolEntry::Dynamic(size, id) => {
Self::pad_sized(size, assembler);
assembler.dynamic_relocation(id, 0, size as u8, size as u8, D::Relocation::from_size(size));
},
LitPoolEntry::Global(size, name) => {
Self::pad_sized(size, assembler);
assembler.global_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(size));
},
LitPoolEntry::Forward(size, name) => {
Self::pad_sized(size, assembler);
assembler.forward_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(size));
},
LitPoolEntry::Backward(size, name) => {
Self::pad_sized(size, assembler);
assembler.backward_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(size));
},
LitPoolEntry::Align(with, alignment) => assembler.align(alignment, with),
}
}
}
}
#[cfg(test)]
mod tests {
use crate::*;
use relocations::RelocationSize;
// Run the shared literal-pool test against each relocation implementation.
#[test]
fn test_litpool_size() {
test_litpool::<RelocationSize>();
}
#[test]
fn test_litpool_x64() {
test_litpool::<x64::X64Relocation>();
}
#[test]
fn test_litpool_x86() {
test_litpool::<x86::X86Relocation>();
}
#[test]
fn test_litpool_aarch64() {
test_litpool::<aarch64::Aarch64Relocation>();
}
// Builds a pool containing literals of every size plus every kind of label
// reference, emits it, and checks both the computed offsets and the final
// assembled bytes.
fn test_litpool<R: Relocation + Debug>() {
let mut ops = Assembler::<R>::new().unwrap();
let dynamic1 = ops.new_dynamic_label();
let mut pool = components::LitPool::new();
ops.local_label("backward1");
// literal bytes pack densely; wider literals are aligned to their size,
// hence the gaps (3 -> 4 for the u16, 8 for the u32, 16 for the u64)
assert_eq!(pool.push_u8(0x12), 0);
assert_eq!(pool.push_u8(0x34), 1);
assert_eq!(pool.push_u8(0x56), 2);
assert_eq!(pool.push_u16(0x789A), 4);
assert_eq!(pool.push_u32(0xBCDE_F012), 8);
assert_eq!(pool.push_u64(0x3456_789A_BCDE_F012), 16);
assert_eq!(pool.push_forward("forward1", RelocationSize::Byte), 24);
// explicit alignment with a 0xCC fill byte, then the label references
pool.align(4, 0xCC);
assert_eq!(pool.push_global("global1", RelocationSize::Word), 28);
assert_eq!(pool.push_dynamic(dynamic1, RelocationSize::DWord), 32);
assert_eq!(pool.push_backward("backward1", RelocationSize::QWord), 40);
pool.emit(&mut ops);
assert_eq!(ops.offset().0, 48);
// all referenced labels resolve to the end of the pool (offset 48),
// except backward1 which was placed at offset 0
ops.local_label("forward1");
ops.global_label("global1");
ops.dynamic_label(dynamic1);
assert_eq!(ops.commit(), Ok(()));
let buf = ops.finalize().unwrap();
// label fields hold target-minus-field-offset values: 24 (48-24), 20
// (48-28), 16 (48-32) and -40 (0-40, sign-extended to 8 bytes)
assert_eq!(&*buf, &[
0x12, 0x34, 0x56, 0x00, 0x9A, 0x78, 0x00, 0x00,
0x12, 0xF0, 0xDE, 0xBC, 0x00, 0x00, 0x00, 0x00,
0x12, 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34,
24 , 0xCC, 0xCC, 0xCC, 20 , 0 , 0x00, 0x00,
16 , 0 , 0 , 0 , 0x00, 0x00, 0x00, 0x00,
0xD8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFFu8,
] as &[u8]);
}
}