1use std::io;
4use std::collections::hash_map::Entry;
5use std::collections::BTreeMap;
6use std::sync::{Arc, RwLock, RwLockWriteGuard};
7use std::mem;
8
9use fnv::FnvHashMap;
10
11use crate::{DynamicLabel, AssemblyOffset, DynasmError, LabelKind, DynasmLabelApi};
12use crate::mmap::{ExecutableBuffer, MutableBuffer};
13use crate::relocations::{Relocation, RelocationKind, RelocationSize, ImpossibleRelocation};
14use crate::cache_control;
15
/// A key identifying a statically named label. Global labels are identified
/// by name alone (they use version 0); local labels, which may be defined
/// multiple times, additionally carry a version counter starting at 1 so
/// every definition maps to a distinct key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticLabel {
    name: &'static str,
    version: usize,
}

impl StaticLabel {
    /// Returns the key for the global label with the given name.
    pub fn global(name: &'static str) -> StaticLabel {
        Self { name, version: 0 }
    }

    /// Returns the key for the `version`th definition of the local label
    /// with the given name.
    pub fn local(name: &'static str, version: usize) -> StaticLabel {
        Self { name, version }
    }

    /// Returns true if this key refers to a global label.
    pub fn is_global(&self) -> bool {
        self.version == 0
    }

    /// Returns true if this key refers to a local label.
    pub fn is_local(&self) -> bool {
        !self.is_global()
    }

    /// Returns the key for the next definition of the same label: local
    /// labels get their version bumped, global labels are returned unchanged.
    pub fn next(mut self) -> StaticLabel {
        if self.version != 0 {
            self.version += 1;
        }
        self
    }

    /// Returns the key for the first definition of the local label with the
    /// given name.
    pub fn first(name: &'static str) -> StaticLabel {
        Self::local(name, 1)
    }

    /// Returns the label's name, without any version information.
    pub fn get_name(&self) -> &'static str {
        self.name
    }
}
79
/// Owns the executable buffer an assembler emits into and implements the
/// commit/grow strategy: appending new code in place when it fits, or
/// migrating to a larger buffer when it does not.
#[derive(Debug)]
pub struct MemoryManager {
    // the executable buffer, shared with readers through `Arc<RwLock<..>>`
    execbuffer: Arc<RwLock<ExecutableBuffer>>,

    // total capacity of the currently allocated buffer, in bytes
    execbuffer_size: usize,
    // number of bytes committed so far (offset of the first free byte)
    asmoffset: usize,

    // cached base address of the buffer, readable without taking the lock
    execbuffer_addr: usize
}
94
impl MemoryManager {
    /// Creates a new memory manager backed by an executable buffer of
    /// `initial_mmap_size` bytes.
    ///
    /// # Errors
    /// Propagates any I/O error from allocating the executable mapping.
    pub fn new(initial_mmap_size: usize) -> io::Result<Self> {
        let execbuffer = ExecutableBuffer::new(initial_mmap_size)?;
        let execbuffer_addr = execbuffer.as_ptr() as usize;

        Ok(MemoryManager {
            execbuffer: Arc::new(RwLock::new(execbuffer)),
            execbuffer_size: initial_mmap_size,
            asmoffset: 0,
            execbuffer_addr
        })
    }

    /// Returns the number of bytes committed to the buffer so far.
    pub fn committed(&self) -> usize {
        self.asmoffset
    }

    /// Returns the base address of the executable buffer.
    pub fn execbuffer_addr(&self) -> usize {
        self.execbuffer_addr
    }

    /// Appends the contents of `new` to the committed region, clearing `new`.
    ///
    /// If the current buffer is too small it is replaced by a buffer grown by
    /// doubling; in that case `f` is called with the filled new buffer plus
    /// the old and new base addresses — presumably so the caller can fix up
    /// address-dependent relocations before the buffer becomes executable
    /// (confirm against call sites).
    pub fn commit<F>(&mut self, new: &mut Vec<u8>, f: F) where F: FnOnce(&mut [u8], usize, usize) {
        let old_asmoffset = self.asmoffset;
        let new_asmoffset = self.asmoffset + new.len();

        // nothing to commit
        if old_asmoffset >= new_asmoffset {
            return;
        }

        if new_asmoffset > self.execbuffer_size {
            // grow by doubling until the new contents fit
            while self.execbuffer_size <= new_asmoffset {
                self.execbuffer_size *= 2;
            }

            let mut new_buffer = MutableBuffer::new(self.execbuffer_size).expect("Could not allocate a larger buffer");
            new_buffer.set_len(new_asmoffset);

            // copy the previously committed code, then the new code, into
            // the replacement buffer
            new_buffer[.. old_asmoffset].copy_from_slice(&self.execbuffer.read().unwrap());
            new_buffer[old_asmoffset..].copy_from_slice(new);
            let new_buffer_addr = new_buffer.as_ptr() as usize;

            // let the caller adjust the contents while still writable
            f(&mut new_buffer, self.execbuffer_addr, new_buffer_addr);

            // everything moved, so the whole buffer needs an icache flush
            cache_control::synchronize_icache(&new_buffer);

            // swap in the new buffer, making it executable
            self.execbuffer_addr = new_buffer_addr;
            *self.execbuffer.write().unwrap() = new_buffer.make_exec().expect("Could not swap buffer protection modes")

        } else {

            // in-place append: flip the buffer to writable while holding the
            // write lock, copy the new code in, then restore exec protection
            let mut lock = self.write();
            let buffer = mem::replace(&mut *lock, ExecutableBuffer::default());
            let mut buffer = buffer.make_mut().expect("Could not swap buffer protection modes");

            buffer.set_len(new_asmoffset);
            buffer[old_asmoffset..].copy_from_slice(new);

            // only the appended range changed, so only it needs flushing
            cache_control::synchronize_icache(&buffer[old_asmoffset .. ]);

            let buffer = buffer.make_exec().expect("Could not swap buffer protection modes");
            *lock = buffer;
        }

        new.clear();
        self.asmoffset = new_asmoffset;
    }

    /// Acquires a write lock on the shared executable buffer.
    pub fn write(&self) -> RwLockWriteGuard<'_, ExecutableBuffer> {
        self.execbuffer.write().unwrap()
    }

    /// Consumes the manager and returns the executable buffer, if no other
    /// handles (see `reader`) are still alive; otherwise returns `self`
    /// unchanged so the caller can retry later.
    pub fn finalize(self) -> Result<ExecutableBuffer, Self> {
        match Arc::try_unwrap(self.execbuffer) {
            Ok(execbuffer) => Ok(execbuffer.into_inner().unwrap()),
            Err(arc) => Err(Self {
                execbuffer: arc,
                ..self
            })
        }
    }

    /// Returns a new shared read handle to the executable buffer.
    pub fn reader(&self) -> Arc<RwLock<ExecutableBuffer>> {
        self.execbuffer.clone()
    }
}
198
199
/// A registry recording the offsets at which labels have been defined.
#[derive(Debug, Clone, Default)]
pub struct LabelRegistry {
    // offsets of global labels and versioned local labels
    static_labels: FnvHashMap<StaticLabel, AssemblyOffset>,
    // offsets of dynamic labels, indexed by `DynamicLabel` id; `None` until defined
    dynamic_labels: Vec<Option<AssemblyOffset>>,
    // number of definitions seen so far for each local label name
    local_versions: FnvHashMap<&'static str, usize>,
}
212
213impl LabelRegistry {
214 pub fn new() -> LabelRegistry {
216 LabelRegistry {
217 static_labels: FnvHashMap::default(),
218 dynamic_labels: Vec::new(),
219 local_versions: FnvHashMap::default(),
220 }
221 }
222
223 pub fn with_capacity(locals: usize, globals: usize, dynamics: usize) -> LabelRegistry {
225 LabelRegistry {
226 static_labels: FnvHashMap::with_capacity_and_hasher(locals + globals, Default::default()),
227 dynamic_labels: Vec::with_capacity(dynamics),
228 local_versions: FnvHashMap::with_capacity_and_hasher(locals, Default::default()),
229 }
230 }
231
232 pub fn clear(&mut self) {
234 self.static_labels.clear();
235 self.dynamic_labels.clear();
236 self.local_versions.clear();
237 }
238
239 pub fn new_dynamic_label(&mut self) -> DynamicLabel {
241 let id = self.dynamic_labels.len();
242 self.dynamic_labels.push(None);
243 DynamicLabel(id)
244 }
245
246 pub fn define_dynamic(&mut self, id: DynamicLabel, offset: AssemblyOffset) -> Result<(), DynasmError> {
248 match self.dynamic_labels.get_mut(id.0) {
249 Some(Some(_)) => return Err(DynasmError::DuplicateLabel(LabelKind::Dynamic(id))),
250 Some(e) => *e = Some(offset),
251 None => return Err(DynasmError::UnknownLabel(LabelKind::Dynamic(id))),
252 }
253 Ok(())
254 }
255
256 pub fn define_global(&mut self, name: &'static str, offset: AssemblyOffset) -> Result<(), DynasmError> {
258 match self.static_labels.entry(StaticLabel::global(name)) {
259 Entry::Occupied(_) => Err(DynasmError::DuplicateLabel(LabelKind::Global(name))),
260 Entry::Vacant(v) => {
261 v.insert(offset);
262 Ok(())
263 }
264 }
265 }
266
267 pub fn define_local(&mut self, name: &'static str, offset: AssemblyOffset) {
269 let generation = match self.local_versions.entry(name) {
270 Entry::Occupied(mut o) => {
271 *o.get_mut() += 1;
272 *o.get()
273 },
274 Entry::Vacant(v) => {
275 v.insert(1);
276 1
277 }
278 };
279 self.static_labels.insert(StaticLabel::local(name, generation), offset);
280 }
281
282 pub fn place_local_reference(&self, name: &'static str) -> Option<StaticLabel> {
285 self.local_versions.get(name).map(|&version| StaticLabel::local(name, version))
286 }
287
288 pub fn resolve_dynamic(&self, id: DynamicLabel) -> Result<AssemblyOffset, DynasmError> {
290 self.dynamic_labels.get(id.0).and_then(|&e| e).ok_or(DynasmError::UnknownLabel(LabelKind::Dynamic(id)))
291 }
292
293 pub fn resolve_static(&self, label: &StaticLabel) -> Result<AssemblyOffset, DynasmError> {
295 self.static_labels.get(label).cloned().ok_or_else(|| DynasmError::UnknownLabel(
296 if label.is_global() {
297 LabelKind::Global(label.name)
298 } else {
299 LabelKind::Local(label.name)
300 }
301 ))
302 }
303}
304
305
/// All the information needed to patch one relocation field once its target
/// offset or address is known.
#[derive(Clone, Debug)]
pub struct PatchLoc<R: Relocation> {
    /// Reference point in the assembly buffer; `field_offset` and
    /// `ref_offset` are measured backwards from here (see `range`/`value`).
    pub location: AssemblyOffset,
    /// Distance in bytes from `location` back to the start of the field.
    pub field_offset: u8,
    /// Distance in bytes from `location` back to the anchor point that
    /// relative relocations are computed against.
    pub ref_offset: u8,
    /// The relocation's encoding/size description.
    pub relocation: R,
    /// Constant added to the computed relocation value (see `value`).
    pub target_offset: isize,
}
320
321impl<R: Relocation> PatchLoc<R> {
322 pub fn new(location: AssemblyOffset, target_offset: isize, field_offset: u8, ref_offset: u8, relocation: R) -> PatchLoc<R> {
324 PatchLoc {
325 location,
326 field_offset,
327 ref_offset,
328 relocation,
329 target_offset
330 }
331 }
332
333 pub fn range(&self, buf_offset: usize) -> std::ops::Range<usize> {
337 let field_offset = self.location.0 - buf_offset - self.field_offset as usize;
338 field_offset .. field_offset + self.relocation.size()
339 }
340
341 pub fn value(&self, target: usize, buf_addr: usize) -> isize {
343 (match self.relocation.kind() {
344 RelocationKind::Relative => target.wrapping_sub(self.location.0 - self.ref_offset as usize),
345 RelocationKind::RelToAbs => target.wrapping_sub(self.location.0 - self.ref_offset as usize).wrapping_sub(buf_addr),
346 RelocationKind::AbsToRel => target.wrapping_add(buf_addr),
347 RelocationKind::Absolute => target
348 }) as isize + self.target_offset
349 }
350
351 pub fn patch(&self, buffer: &mut [u8], buf_addr: usize, target: usize) -> Result<(), ImpossibleRelocation> {
355 let value = self.value(target, buf_addr);
356 self.relocation.write_value(buffer, value)
357 }
358
359 pub fn adjust(&self, buffer: &mut [u8], adjustment: isize) -> Result<(), ImpossibleRelocation> {
363 let value = match self.relocation.kind() {
364 RelocationKind::Relative
365 | RelocationKind::Absolute => return Ok(()),
366 RelocationKind::RelToAbs => self.relocation.read_value(buffer).wrapping_sub(adjustment),
367 RelocationKind::AbsToRel => self.relocation.read_value(buffer).wrapping_add(adjustment),
368 };
369 self.relocation.write_value(buffer, value)
370 }
371
372 pub fn needs_adjustment(&self) -> bool {
374 match self.relocation.kind() {
375 RelocationKind::Relative
376 | RelocationKind::Absolute => false,
377 RelocationKind::RelToAbs
378 | RelocationKind::AbsToRel => true,
379 }
380 }
381}
382
383
384#[derive(Debug, Default)]
386pub struct RelocRegistry<R: Relocation> {
387 static_targets: Vec<(PatchLoc<R>, StaticLabel)>,
388 dynamic_targets: Vec<(PatchLoc<R>, DynamicLabel)>,
389}
390
391impl<R: Relocation> RelocRegistry<R> {
392 pub fn new() -> RelocRegistry<R> {
394 RelocRegistry {
395 static_targets: Vec::new(),
396 dynamic_targets: Vec::new(),
397 }
398 }
399
400 pub fn with_capacity(static_references: usize, dynamic_references: usize) -> RelocRegistry<R> {
402 RelocRegistry {
403 static_targets: Vec::with_capacity(static_references),
404 dynamic_targets: Vec::with_capacity(dynamic_references),
405 }
406 }
407
408 pub fn add_static(&mut self, label: StaticLabel, patchloc: PatchLoc<R>) {
410 self.static_targets.push((patchloc, label));
411 }
412
413 pub fn add_dynamic(&mut self, id: DynamicLabel, patchloc: PatchLoc<R>) {
415 self.dynamic_targets.push((patchloc, id))
416 }
417
418 pub fn take_statics<'a>(&'a mut self) -> impl Iterator<Item=(PatchLoc<R>, StaticLabel)> + 'a {
421 self.static_targets.drain(..)
422 }
423
424 pub fn take_dynamics<'a>(&'a mut self) -> impl Iterator<Item=(PatchLoc<R>, DynamicLabel)> + 'a {
427 self.dynamic_targets.drain(..)
428 }
429}
430
431
/// A collection of relocations kept around after patching, ordered by the
/// buffer offset at which each relocation field starts so ranges of them
/// can be looked up and removed efficiently.
#[derive(Debug, Default)]
pub struct ManagedRelocs<R: Relocation> {
    // keyed by the start offset of the relocation field in the buffer
    managed: BTreeMap<usize, PatchLoc<R>>
}
438
439impl<R: Relocation> ManagedRelocs<R> {
440 pub fn new() -> Self {
442 Self {
443 managed: BTreeMap::new()
444 }
445 }
446
447 pub fn add(&mut self, patchloc: PatchLoc<R>) {
449 self.managed.insert(patchloc.location.0 - patchloc.field_offset as usize, patchloc);
450 }
451
452 pub fn append(&mut self, other: &mut ManagedRelocs<R>) {
454 self.managed.append(&mut other.managed);
455 }
456
457 pub fn remove_between(&mut self, start: usize, end: usize) {
462 if start == end {
463 return;
464 }
465
466 let keys: Vec<_> = self.managed.range(start .. end).map(|(&k, _)| k).collect();
467 for k in keys {
468 self.managed.remove(&k);
469 }
470 }
471
472 pub fn iter<'a>(&'a self) -> impl Iterator<Item=&'a PatchLoc<R>> + 'a {
474 self.managed.values()
475 }
476}
477
478
/// The kinds of entries that can be recorded in a literal pool.
#[derive(Clone, Debug)]
enum LitPoolEntry {
    /// A literal byte.
    U8(u8),
    /// A literal 2-byte value.
    U16(u16),
    /// A literal 4-byte value.
    U32(u32),
    /// A literal 8-byte value.
    U64(u64),
    /// A reference to a dynamic label: (field size, label, relative?).
    Dynamic(RelocationSize, DynamicLabel, bool),
    /// A reference to a global label: (field size, name, relative?).
    Global(RelocationSize, &'static str, bool),
    /// A reference to a forward local label: (field size, name, relative?).
    Forward(RelocationSize, &'static str, bool),
    /// A reference to a backward local label: (field size, name, relative?).
    Backward(RelocationSize, &'static str, bool),
    /// An absolute target emitted via a RelToAbs relocation (see `emit`).
    Absolute(RelocationSize, usize),
    /// A relative target emitted via an AbsToRel relocation (see `emit`).
    Relative(RelocationSize, usize),
    /// Alignment padding: (fill byte, alignment in bytes).
    Align(u8, usize),
}
493
/// A literal pool under construction: values and label references are pushed
/// into it (each push returning its offset within the pool), and the whole
/// pool is later written into an assembler with `emit`.
#[derive(Clone, Debug, Default)]
pub struct LitPool {
    // current size of the pool in bytes (offset where the next entry goes)
    offset: usize,
    // recorded entries, in insertion order
    entries: Vec<LitPoolEntry>,
}
502
503impl LitPool {
504 pub fn new() -> Self {
506 LitPool {
507 offset: 0,
508 entries: Vec::new(),
509 }
510 }
511
512 fn bump_offset(&mut self, size: RelocationSize) -> isize {
514 self.align(size as usize, 0);
516 let offset = self.offset;
517 self.offset += size as usize;
518 offset as isize
519 }
520
521 pub fn align(&mut self, size: usize, with: u8) {
523 let misalign = self.offset % size;
524 if misalign == 0 {
525 return;
526 }
527
528 self.entries.push(LitPoolEntry::Align(with, size));
529 self.offset += size - misalign;
530 }
531
532 pub fn push_u8(&mut self, value: u8) -> isize {
534 let offset = self.bump_offset(RelocationSize::Byte);
535 self.entries.push(LitPoolEntry::U8(value));
536 offset
537 }
538
539 pub fn push_u16(&mut self, value: u16) -> isize {
541 let offset = self.bump_offset(RelocationSize::Word);
542 self.entries.push(LitPoolEntry::U16(value));
543 offset
544 }
545
546 pub fn push_u32(&mut self, value: u32) -> isize {
548 let offset = self.bump_offset(RelocationSize::DWord);
549 self.entries.push(LitPoolEntry::U32(value));
550 offset
551 }
552
553 pub fn push_u64(&mut self, value: u64) -> isize {
555 let offset = self.bump_offset(RelocationSize::QWord);
556 self.entries.push(LitPoolEntry::U64(value));
557 offset
558 }
559
560 pub fn push_dynamic(&mut self, id: DynamicLabel, size: RelocationSize, relative: bool) -> isize {
564 let offset = self.bump_offset(size);
565 self.entries.push(LitPoolEntry::Dynamic(size, id, relative));
566 offset
567 }
568
569 pub fn push_global(&mut self, name: &'static str, size: RelocationSize, relative: bool) -> isize {
573 let offset = self.bump_offset(size);
574 self.entries.push(LitPoolEntry::Global(size, name, relative));
575 offset
576 }
577
578 pub fn push_forward(&mut self, name: &'static str, size: RelocationSize, relative: bool) -> isize {
582 let offset = self.bump_offset(size);
583 self.entries.push(LitPoolEntry::Forward(size, name, relative));
584 offset
585 }
586
587 pub fn push_backward(&mut self, name: &'static str, size: RelocationSize, relative: bool) -> isize {
591 let offset = self.bump_offset(size);
592 self.entries.push(LitPoolEntry::Backward(size, name, relative));
593 offset
594 }
595
596 pub fn push_absolute_as_relative(&mut self, target: usize, size: RelocationSize) -> isize {
598 let offset = self.bump_offset(size);
599 self.entries.push(LitPoolEntry::Absolute(size, target));
600 offset
601 }
602
603 pub fn push_relative_as_absolute(&mut self, target: usize, size: RelocationSize) -> isize {
606 let offset = self.bump_offset(size);
607 self.entries.push(LitPoolEntry::Relative(size, target));
608 offset
609 }
610
611 fn pad_sized<D: DynasmLabelApi>(size: RelocationSize, assembler: &mut D) {
612 match size {
613 RelocationSize::Byte => assembler.push(0),
614 RelocationSize::Word => assembler.push_u16(0),
615 RelocationSize::DWord => assembler.push_u32(0),
616 RelocationSize::QWord => assembler.push_u64(0),
617 }
618 }
619
620 pub fn emit<D: DynasmLabelApi>(self, assembler: &mut D) {
622 for entry in self.entries {
623 match entry {
624 LitPoolEntry::U8(value) => assembler.push(value),
625 LitPoolEntry::U16(value) => assembler.push_u16(value),
626 LitPoolEntry::U32(value) => assembler.push_u32(value),
627 LitPoolEntry::U64(value) => assembler.push_u64(value),
628 LitPoolEntry::Dynamic(size, id, relative) => {
629 Self::pad_sized(size, assembler);
630 let kind = if relative { RelocationKind::Relative } else { RelocationKind::AbsToRel };
631 assembler.dynamic_relocation(id, 0, size as u8, size as u8, D::Relocation::from_size(kind, size));
632 },
633 LitPoolEntry::Global(size, name, relative) => {
634 Self::pad_sized(size, assembler);
635 let kind = if relative { RelocationKind::Relative } else { RelocationKind::AbsToRel };
636 assembler.global_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(kind, size));
637 },
638 LitPoolEntry::Forward(size, name, relative) => {
639 Self::pad_sized(size, assembler);
640 let kind = if relative { RelocationKind::Relative } else { RelocationKind::AbsToRel };
641 assembler.forward_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(kind, size));
642 },
643 LitPoolEntry::Backward(size, name, relative) => {
644 Self::pad_sized(size, assembler);
645 let kind = if relative { RelocationKind::Relative } else { RelocationKind::AbsToRel };
646 assembler.backward_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(kind, size));
647 },
648 LitPoolEntry::Absolute(size, target) => {
649 Self::pad_sized(size, assembler);
650 let kind = RelocationKind::RelToAbs;
651 assembler.value_relocation(target, size as u8, size as u8, D::Relocation::from_size(kind, size));
652 },
653 LitPoolEntry::Relative(size, target) => {
654 Self::pad_sized(size, assembler);
655 let kind = RelocationKind::AbsToRel;
656 assembler.value_relocation(target, size as u8, size as u8, D::Relocation::from_size(kind, size));
657 },
658 LitPoolEntry::Align(with, alignment) => assembler.align(alignment, with),
659 }
660 }
661 }
662}
663
#[cfg(test)]
mod tests {
    use crate::*;
    use relocations::{SimpleRelocation, RelocationSize};

    // The same literal-pool scenario is run against each relocation
    // implementation; all are expected to produce the identical byte layout.
    #[test]
    fn test_litpool_simple() {
        test_litpool::<SimpleRelocation>();
    }

    #[test]
    fn test_litpool_x64() {
        test_litpool::<x64::X64Relocation>();
    }

    #[test]
    fn test_litpool_x86() {
        test_litpool::<x86::X86Relocation>();
    }

    #[test]
    fn test_litpool_aarch64() {
        test_litpool::<aarch64::Aarch64Relocation>();
    }

    // Builds a pool mixing literals, alignment and every label-reference
    // kind, emits it, and checks both the returned offsets (which exercise
    // the natural-alignment logic) and the final resolved bytes.
    fn test_litpool<R: Relocation + Debug>() {
        let mut ops = Assembler::<R>::new().unwrap();
        let dynamic1 = ops.new_dynamic_label();

        let mut pool = components::LitPool::new();

        ops.local_label("backward1");

        // bytes pack densely at offsets 0..3
        assert_eq!(pool.push_u8(0x12), 0);
        assert_eq!(pool.push_u8(0x34), 1);
        assert_eq!(pool.push_u8(0x56), 2);

        // wider literals are aligned to their own size
        assert_eq!(pool.push_u16(0x789A), 4);

        assert_eq!(pool.push_u32(0xBCDE_F012), 8);

        assert_eq!(pool.push_u64(0x3456_789A_BCDE_F012), 16);

        assert_eq!(pool.push_forward("forward1", RelocationSize::Byte, true), 24);

        // explicit alignment with a 0xCC fill byte
        pool.align(4, 0xCC);

        assert_eq!(pool.push_global("global1", RelocationSize::Word, true), 28);

        assert_eq!(pool.push_dynamic(dynamic1, RelocationSize::DWord, true), 32);

        assert_eq!(pool.push_backward("backward1", RelocationSize::QWord, true), 40);

        pool.emit(&mut ops);

        // total pool size: 40 + 8-byte backward reference
        assert_eq!(ops.offset().0, 48);

        // define the targets the pool references, then commit to resolve them
        ops.local_label("forward1");
        ops.global_label("global1");
        ops.dynamic_label(dynamic1);

        assert_eq!(ops.commit(), Ok(()));
        let buf = ops.finalize().unwrap();

        // literals in little-endian order, 0xCC alignment fill, relative
        // references resolved against the labels defined at offset 48
        assert_eq!(&*buf, &[
            0x12, 0x34, 0x56, 0x00, 0x9A, 0x78, 0x00, 0x00,
            0x12, 0xF0, 0xDE, 0xBC, 0x00, 0x00, 0x00, 0x00,
            0x12, 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34,
            24 , 0xCC, 0xCC, 0xCC, 20 , 0 , 0x00, 0x00,
            16 , 0 , 0 , 0 , 0x00, 0x00, 0x00, 0x00,
            0xD8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFFu8,
        ] as &[u8]);
    }
}