//! This module provides reusable components for implementing assemblers.

use std::io;
use std::collections::hash_map::Entry;
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock, RwLockWriteGuard};
use std::mem;

use fnv::FnvHashMap;

use crate::{DynamicLabel, AssemblyOffset, DynasmError, LabelKind, DynasmLabelApi};
use crate::mmap::{ExecutableBuffer, MutableBuffer};
use crate::relocations::{Relocation, RelocationKind, RelocationSize, ImpossibleRelocation};
use crate::cache_control;
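
/// A reference to a static label: either a global label, identified by a
/// unique name, or a local label, identified by a name and a definition
/// version, as local labels may be defined more than once.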
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticLabel {
    name: &'static str,
    version: usize,
}

impl StaticLabel {
    /// Returns the label that maps to the global label `name`.
    pub fn global(name: &'static str) -> StaticLabel {
        StaticLabel {
            name,
            version: 0,
        }
    }

    /// Returns the label that maps to the `version`th definition of the
    /// local label `name`.
    pub fn local(name: &'static str, version: usize) -> StaticLabel {
        StaticLabel {
            name,
            version,
        }
    }

    /// Returns true if this label is a global label (version 0).
    pub fn is_global(&self) -> bool {
        self.version == 0
    }

    /// Returns true if this label is a local label (version 1 or higher).
    pub fn is_local(&self) -> bool {
        self.version != 0
    }

    /// Returns the reference to the next definition of this label. Local
    /// labels advance to the next version; global labels are returned unchanged.
    pub fn next(mut self) -> StaticLabel {
        self.version += (self.version != 0) as usize;
        self
    }

    /// Returns the reference to the first definition of the local label `name`.
    pub fn first(name: &'static str) -> StaticLabel {
        StaticLabel {
            name,
            version: 1,
        }
    }

    /// Returns the name of this label.
    pub fn get_name(&self) -> &'static str {
        self.name
    }
}
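
/// Manages the executable buffer that committed machine code is stored in.
/// It hands out shared read access to the buffer, and handles committing new
/// code into it, growing the buffer by doubling when it runs out of space.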
#[derive(Debug)]
pub struct MemoryManager {
    // the executable buffer that the final machine code lives in
    execbuffer: Arc<RwLock<ExecutableBuffer>>,

    // the size of the allocated buffer, cached so reading it does not
    // require taking the lock
    execbuffer_size: usize,
    // the amount of the buffer that has been committed so far
    asmoffset: usize,

    // the address the buffer currently starts at
    execbuffer_addr: usize
}

impl MemoryManager {
    /// Creates a new memory manager backed by an executable buffer of
    /// `initial_mmap_size` bytes.
    pub fn new(initial_mmap_size: usize) -> io::Result<Self> {
        let execbuffer = ExecutableBuffer::new(initial_mmap_size)?;
        let execbuffer_addr = execbuffer.as_ptr() as usize;

        Ok(MemoryManager {
            execbuffer: Arc::new(RwLock::new(execbuffer)),
            execbuffer_size: initial_mmap_size,
            asmoffset: 0,
            execbuffer_addr
        })
    }

    /// Returns the amount of bytes that have been committed so far.
    pub fn committed(&self) -> usize {
        self.asmoffset
    }

    /// Returns the address the executable buffer currently starts at.
    pub fn execbuffer_addr(&self) -> usize {
        self.execbuffer_addr
    }

    /// Commits the code from `new` into the executable buffer, draining `new`
    /// in the process. If the buffer has to be reallocated to fit the new
    /// code, the callback `f` is invoked with the new buffer contents and the
    /// old and new buffer addresses, so the caller can fix up any
    /// position-dependent data.
    pub fn commit<F>(&mut self, new: &mut Vec<u8>, f: F) where F: FnOnce(&mut [u8], usize, usize) {
        let old_asmoffset = self.asmoffset;
        let new_asmoffset = self.asmoffset + new.len();

        // nothing to commit
        if old_asmoffset >= new_asmoffset {
            return;
        }

        // check if we have to grow the buffer
        if new_asmoffset > self.execbuffer_size {
            // double the size until the new code fits
            while self.execbuffer_size <= new_asmoffset {
                self.execbuffer_size *= 2;
            }

            // allocate a bigger buffer and copy the old and new code into it
            let mut new_buffer = MutableBuffer::new(self.execbuffer_size).expect("Could not allocate a larger buffer");
            new_buffer.set_len(new_asmoffset);

            new_buffer[.. old_asmoffset].copy_from_slice(&self.execbuffer.read().unwrap());
            new_buffer[old_asmoffset ..].copy_from_slice(new);
            let new_buffer_addr = new_buffer.as_ptr() as usize;

            // let the caller adjust any position-dependent data
            f(&mut new_buffer, self.execbuffer_addr, new_buffer_addr);

            // ensure the icache is in sync with the new code
            cache_control::synchronize_icache(&new_buffer);

            // swap in the new buffer
            self.execbuffer_addr = new_buffer_addr;
            *self.execbuffer.write().unwrap() = new_buffer.make_exec().expect("Could not swap buffer protection modes")

        } else {
            // temporarily move the buffer out of the lock and make it writeable
            let mut lock = self.write();
            let buffer = mem::replace(&mut *lock, ExecutableBuffer::default());
            let mut buffer = buffer.make_mut().expect("Could not swap buffer protection modes");

            // append the new code
            buffer.set_len(new_asmoffset);
            buffer[old_asmoffset ..].copy_from_slice(new);

            cache_control::synchronize_icache(&buffer[old_asmoffset ..]);

            // swap the buffer back in
            let buffer = buffer.make_exec().expect("Could not swap buffer protection modes");
            *lock = buffer;
        }

        new.clear();
        self.asmoffset = new_asmoffset;
    }

    /// Acquires write access to the executable buffer.
    pub fn write(&self) -> RwLockWriteGuard<ExecutableBuffer> {
        self.execbuffer.write().unwrap()
    }

    /// Consumes the memory manager, returning the executable buffer if no
    /// reader handles to it are still alive, and `self` otherwise.
    pub fn finalize(self) -> Result<ExecutableBuffer, Self> {
        match Arc::try_unwrap(self.execbuffer) {
            Ok(execbuffer) => Ok(execbuffer.into_inner().unwrap()),
            Err(arc) => Err(Self {
                execbuffer: arc,
                ..self
            })
        }
    }

    /// Creates a new shared reference through which the executable buffer can
    /// be read while the assembler keeps running.
    pub fn reader(&self) -> Arc<RwLock<ExecutableBuffer>> {
        self.execbuffer.clone()
    }
}
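
/// A registry of labels. Tracks global labels (unique names, defined once),
/// local labels (names that may be redefined) and dynamic labels (anonymous
/// ids created at runtime), mapping each to the offset it was defined at.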
#[derive(Debug, Clone, Default)]
pub struct LabelRegistry {
    // mapping of global and local labels to their offsets
    static_labels: FnvHashMap<StaticLabel, AssemblyOffset>,
    // mapping of dynamic label ids to their offsets, if defined
    dynamic_labels: Vec<Option<AssemblyOffset>>,
    // the current version of each local label name
    local_versions: FnvHashMap<&'static str, usize>,
}

impl LabelRegistry {
    /// Creates a new, empty label registry.
    pub fn new() -> LabelRegistry {
        LabelRegistry {
            static_labels: FnvHashMap::default(),
            dynamic_labels: Vec::new(),
            local_versions: FnvHashMap::default(),
        }
    }

    /// Creates a new label registry with storage preallocated for the given
    /// amounts of local, global and dynamic labels.
    pub fn with_capacity(locals: usize, globals: usize, dynamics: usize) -> LabelRegistry {
        LabelRegistry {
            static_labels: FnvHashMap::with_capacity_and_hasher(locals + globals, Default::default()),
            dynamic_labels: Vec::with_capacity(dynamics),
            local_versions: FnvHashMap::with_capacity_and_hasher(locals, Default::default()),
        }
    }

    /// Clears the registry without deallocating its backing storage.
    pub fn clear(&mut self) {
        self.static_labels.clear();
        self.dynamic_labels.clear();
        self.local_versions.clear();
    }

    /// Allocates a new, as of yet undefined, dynamic label id.
    pub fn new_dynamic_label(&mut self) -> DynamicLabel {
        let id = self.dynamic_labels.len();
        self.dynamic_labels.push(None);
        DynamicLabel(id)
    }

    /// Defines the dynamic label `id` to be at `offset`.
    pub fn define_dynamic(&mut self, id: DynamicLabel, offset: AssemblyOffset) -> Result<(), DynasmError> {
        match self.dynamic_labels.get_mut(id.0) {
            Some(Some(_)) => return Err(DynasmError::DuplicateLabel(LabelKind::Dynamic(id))),
            Some(e) => *e = Some(offset),
            None => return Err(DynasmError::UnknownLabel(LabelKind::Dynamic(id))),
        }
        Ok(())
    }

    /// Defines the global label `name` to be at `offset`.
    pub fn define_global(&mut self, name: &'static str, offset: AssemblyOffset) -> Result<(), DynasmError> {
        match self.static_labels.entry(StaticLabel::global(name)) {
            Entry::Occupied(_) => Err(DynasmError::DuplicateLabel(LabelKind::Global(name))),
            Entry::Vacant(v) => {
                v.insert(offset);
                Ok(())
            }
        }
    }

    /// Defines the next version of the local label `name` to be at `offset`.
    pub fn define_local(&mut self, name: &'static str, offset: AssemblyOffset) {
        let generation = match self.local_versions.entry(name) {
            Entry::Occupied(mut o) => {
                *o.get_mut() += 1;
                *o.get()
            },
            Entry::Vacant(v) => {
                v.insert(1);
                1
            }
        };
        self.static_labels.insert(StaticLabel::local(name, generation), offset);
    }

    /// Returns a reference to the most recent definition of the local label
    /// `name`, or `None` if it has not been defined yet.
    pub fn place_local_reference(&self, name: &'static str) -> Option<StaticLabel> {
        self.local_versions.get(name).map(|&version| StaticLabel::local(name, version))
    }

    /// Returns the offset at which the dynamic label `id` was defined.
    pub fn resolve_dynamic(&self, id: DynamicLabel) -> Result<AssemblyOffset, DynasmError> {
        self.dynamic_labels.get(id.0).and_then(|&e| e).ok_or(DynasmError::UnknownLabel(LabelKind::Dynamic(id)))
    }

    /// Returns the offset at which the given static label was defined.
    pub fn resolve_static(&self, label: &StaticLabel) -> Result<AssemblyOffset, DynasmError> {
        self.static_labels.get(label).cloned().ok_or_else(|| DynasmError::UnknownLabel(
            if label.is_global() {
                LabelKind::Global(label.name)
            } else {
                LabelKind::Local(label.name)
            }
        ))
    }
}
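
/// A description of a relocation: where it is located in the buffer, what it
/// is relative to, and how it should be encoded.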
#[derive(Clone, Debug)]
pub struct PatchLoc<R: Relocation> {
    /// The offset in the assembly buffer just past the instruction containing this relocation
    pub location: AssemblyOffset,
    /// The offset, backwards from `location`, at which the relocation field starts
    pub field_offset: u8,
    /// The offset, backwards from `location`, from which relative relocations are calculated
    pub ref_offset: u8,
    /// The relocation itself, describing its size, kind and encoding
    pub relocation: R,
    /// A constant offset added to the resolved target of this relocation
    pub target_offset: isize,
}

impl<R: Relocation> PatchLoc<R> {
    /// Creates a new `PatchLoc`.
    pub fn new(location: AssemblyOffset, target_offset: isize, field_offset: u8, ref_offset: u8, relocation: R) -> PatchLoc<R> {
        PatchLoc {
            location,
            field_offset,
            ref_offset,
            relocation,
            target_offset
        }
    }

    /// Returns the range of bytes this relocation patches, relative to a
    /// buffer that starts at `buf_offset`.
    pub fn range(&self, buf_offset: usize) -> std::ops::Range<usize> {
        let field_offset = self.location.0 - buf_offset - self.field_offset as usize;
        field_offset .. field_offset + self.relocation.size()
    }

    /// Computes the value that has to be written into the relocation field for
    /// it to resolve to `target`, given that the buffer is loaded at `buf_addr`.
    pub fn value(&self, target: usize, buf_addr: usize) -> isize {
        (match self.relocation.kind() {
            RelocationKind::Relative => target.wrapping_sub(self.location.0 - self.ref_offset as usize),
            RelocationKind::RelToAbs => target.wrapping_sub(self.location.0 - self.ref_offset as usize + buf_addr),
            RelocationKind::AbsToRel => target.wrapping_add(buf_addr)
        }) as isize + self.target_offset
    }

    /// Patches the relocation field in `buffer` (the slice described by
    /// `self.range`) to resolve to `target`, with the buffer loaded at `buf_addr`.
    pub fn patch(&self, buffer: &mut [u8], buf_addr: usize, target: usize) -> Result<(), ImpossibleRelocation> {
        let value = self.value(target, buf_addr);
        self.relocation.write_value(buffer, value)
    }

    /// Adjusts a previously written relocation field after the buffer moved by
    /// `adjustment` bytes. Purely relative relocations need no adjustment.
    pub fn adjust(&self, buffer: &mut [u8], adjustment: isize) -> Result<(), ImpossibleRelocation> {
        let value = match self.relocation.kind() {
            RelocationKind::Relative => return Ok(()),
            RelocationKind::RelToAbs => self.relocation.read_value(buffer).wrapping_sub(adjustment),
            RelocationKind::AbsToRel => self.relocation.read_value(buffer).wrapping_add(adjustment),
        };
        self.relocation.write_value(buffer, value)
    }

    /// Returns true if this relocation has to be adjusted when the buffer moves.
    pub fn needs_adjustment(&self) -> bool {
        match self.relocation.kind() {
            RelocationKind::Relative => false,
            RelocationKind::RelToAbs
            | RelocationKind::AbsToRel => true,
        }
    }
}
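
/// A registry of relocations that have yet to be resolved, keyed by the kind
/// of label they target.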
#[derive(Debug, Default)]
pub struct RelocRegistry<R: Relocation> {
    static_targets: Vec<(PatchLoc<R>, StaticLabel)>,
    dynamic_targets: Vec<(PatchLoc<R>, DynamicLabel)>,
}

impl<R: Relocation> RelocRegistry<R> {
    /// Creates a new, empty relocation registry.
    pub fn new() -> RelocRegistry<R> {
        RelocRegistry {
            static_targets: Vec::new(),
            dynamic_targets: Vec::new(),
        }
    }

    /// Creates a new relocation registry with storage preallocated for the
    /// given amounts of static and dynamic label references.
    pub fn with_capacity(static_references: usize, dynamic_references: usize) -> RelocRegistry<R> {
        RelocRegistry {
            static_targets: Vec::with_capacity(static_references),
            dynamic_targets: Vec::with_capacity(dynamic_references),
        }
    }

    /// Adds a relocation targeting the static (global or local) label `label`.
    pub fn add_static(&mut self, label: StaticLabel, patchloc: PatchLoc<R>) {
        self.static_targets.push((patchloc, label));
    }

    /// Adds a relocation targeting the dynamic label `id`.
    pub fn add_dynamic(&mut self, id: DynamicLabel, patchloc: PatchLoc<R>) {
        self.dynamic_targets.push((patchloc, id))
    }

    /// Returns an iterator draining all relocations targeting static labels.
    pub fn take_statics<'a>(&'a mut self) -> impl Iterator<Item = (PatchLoc<R>, StaticLabel)> + 'a {
        self.static_targets.drain(..)
    }

    /// Returns an iterator draining all relocations targeting dynamic labels.
    pub fn take_dynamics<'a>(&'a mut self) -> impl Iterator<Item = (PatchLoc<R>, DynamicLabel)> + 'a {
        self.dynamic_targets.drain(..)
    }
}
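
/// A collection of relocations that must be adjusted whenever the buffer they
/// live in moves, ordered by the offset at which their fields start.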
#[derive(Debug, Default)]
pub struct ManagedRelocs<R: Relocation> {
    managed: BTreeMap<usize, PatchLoc<R>>
}

impl<R: Relocation> ManagedRelocs<R> {
    /// Creates a new, empty collection of managed relocations.
    pub fn new() -> Self {
        Self {
            managed: BTreeMap::new()
        }
    }

    /// Adds a relocation to the collection, keyed by the start of its field.
    pub fn add(&mut self, patchloc: PatchLoc<R>) {
        self.managed.insert(patchloc.location.0 - patchloc.field_offset as usize, patchloc);
    }

    /// Moves all relocations from `other` into this collection.
    pub fn append(&mut self, other: &mut ManagedRelocs<R>) {
        self.managed.append(&mut other.managed);
    }

    /// Removes all relocations whose fields start in the range `start .. end`.
    /// This is needed when that part of the buffer is overwritten, as the old
    /// relocations are then no longer meaningful.
    pub fn remove_between(&mut self, start: usize, end: usize) {
        if start == end {
            return;
        }

        let keys: Vec<_> = self.managed.range(start .. end).map(|(&k, _)| k).collect();
        for k in keys {
            self.managed.remove(&k);
        }
    }

    /// Returns an iterator over all managed relocations.
    pub fn iter<'a>(&'a self) -> impl Iterator<Item = &'a PatchLoc<R>> + 'a {
        self.managed.values()
    }
}
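
/// A single entry in a literal pool: either a plain literal, a slot that a
/// label address will be relocated into, or alignment padding.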
#[derive(Clone, Debug)]
enum LitPoolEntry {
    U8(u8),
    U16(u16),
    U32(u32),
    U64(u64),
    Dynamic(RelocationSize, DynamicLabel),
    Global(RelocationSize, &'static str),
    Forward(RelocationSize, &'static str),
    Backward(RelocationSize, &'static str),
    Align(u8, usize),
}
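
/// A pool of literals and label slots to be emitted after a block of
/// instructions. Offsets of entries are tracked as they are pushed, so
/// instructions can reference pool slots before the pool itself is emitted.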
#[derive(Clone, Debug, Default)]
pub struct LitPool {
    offset: usize,
    entries: Vec<LitPoolEntry>,
}

impl LitPool {
    /// Creates a new, empty literal pool.
    pub fn new() -> Self {
        LitPool {
            offset: 0,
            entries: Vec::new(),
        }
    }

    // aligns the pool to the size of the next entry and returns the offset
    // that entry will be emitted at
    fn bump_offset(&mut self, size: RelocationSize) -> isize {
        self.align(size as usize, 0);
        let offset = self.offset;
        self.offset += size as usize;
        offset as isize
    }

    /// Aligns the pool to the given alignment, padding with `with` bytes.
    pub fn align(&mut self, size: usize, with: u8) {
        let misalign = self.offset % size;
        if misalign == 0 {
            return;
        }

        self.entries.push(LitPoolEntry::Align(with, size));
        self.offset += size - misalign;
    }

    /// Adds a `u8` literal, returning its offset in the pool.
    pub fn push_u8(&mut self, value: u8) -> isize {
        let offset = self.bump_offset(RelocationSize::Byte);
        self.entries.push(LitPoolEntry::U8(value));
        offset
    }

    /// Adds a `u16` literal, returning its offset in the pool.
    pub fn push_u16(&mut self, value: u16) -> isize {
        let offset = self.bump_offset(RelocationSize::Word);
        self.entries.push(LitPoolEntry::U16(value));
        offset
    }

    /// Adds a `u32` literal, returning its offset in the pool.
    pub fn push_u32(&mut self, value: u32) -> isize {
        let offset = self.bump_offset(RelocationSize::DWord);
        self.entries.push(LitPoolEntry::U32(value));
        offset
    }

    /// Adds a `u64` literal, returning its offset in the pool.
    pub fn push_u64(&mut self, value: u64) -> isize {
        let offset = self.bump_offset(RelocationSize::QWord);
        self.entries.push(LitPoolEntry::U64(value));
        offset
    }

    /// Adds a slot that will hold the address of dynamic label `id`,
    /// returning its offset in the pool.
    pub fn push_dynamic(&mut self, id: DynamicLabel, size: RelocationSize) -> isize {
        let offset = self.bump_offset(size);
        self.entries.push(LitPoolEntry::Dynamic(size, id));
        offset
    }

    /// Adds a slot that will hold the address of global label `name`,
    /// returning its offset in the pool.
    pub fn push_global(&mut self, name: &'static str, size: RelocationSize) -> isize {
        let offset = self.bump_offset(size);
        self.entries.push(LitPoolEntry::Global(size, name));
        offset
    }

    /// Adds a slot that will hold the address of the next definition of local
    /// label `name`, returning its offset in the pool.
    pub fn push_forward(&mut self, name: &'static str, size: RelocationSize) -> isize {
        let offset = self.bump_offset(size);
        self.entries.push(LitPoolEntry::Forward(size, name));
        offset
    }

    /// Adds a slot that will hold the address of the previous definition of
    /// local label `name`, returning its offset in the pool.
    pub fn push_backward(&mut self, name: &'static str, size: RelocationSize) -> isize {
        let offset = self.bump_offset(size);
        self.entries.push(LitPoolEntry::Backward(size, name));
        offset
    }

    // emits zeroes to reserve space for a relocation of the given size
    fn pad_sized<D: DynasmLabelApi>(size: RelocationSize, assembler: &mut D) {
        match size {
            RelocationSize::Byte => assembler.push(0),
            RelocationSize::Word => assembler.push_u16(0),
            RelocationSize::DWord => assembler.push_u32(0),
            RelocationSize::QWord => assembler.push_u64(0),
        }
    }

    /// Emits the collected literals into `assembler`, registering relocations
    /// for all label slots.
    pub fn emit<D: DynasmLabelApi>(self, assembler: &mut D) {
        for entry in self.entries {
            match entry {
                LitPoolEntry::U8(value) => assembler.push(value),
                LitPoolEntry::U16(value) => assembler.push_u16(value),
                LitPoolEntry::U32(value) => assembler.push_u32(value),
                LitPoolEntry::U64(value) => assembler.push_u64(value),
                LitPoolEntry::Dynamic(size, id) => {
                    Self::pad_sized(size, assembler);
                    assembler.dynamic_relocation(id, 0, size as u8, size as u8, D::Relocation::from_size(size));
                },
                LitPoolEntry::Global(size, name) => {
                    Self::pad_sized(size, assembler);
                    assembler.global_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(size));
                },
                LitPoolEntry::Forward(size, name) => {
                    Self::pad_sized(size, assembler);
                    assembler.forward_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(size));
                },
                LitPoolEntry::Backward(size, name) => {
                    Self::pad_sized(size, assembler);
                    assembler.backward_relocation(name, 0, size as u8, size as u8, D::Relocation::from_size(size));
                },
                LitPoolEntry::Align(with, alignment) => assembler.align(alignment, with),
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::*;
    use crate::relocations::RelocationSize;
    use std::fmt::Debug;

    #[test]
    fn test_litpool_size() {
        test_litpool::<RelocationSize>();
    }

    #[test]
    fn test_litpool_x64() {
        test_litpool::<x64::X64Relocation>();
    }

    #[test]
    fn test_litpool_x86() {
        test_litpool::<x86::X86Relocation>();
    }

    #[test]
    fn test_litpool_aarch64() {
        test_litpool::<aarch64::Aarch64Relocation>();
    }

    fn test_litpool<R: Relocation + Debug>() {
        let mut ops = Assembler::<R>::new().unwrap();
        let dynamic1 = ops.new_dynamic_label();

        let mut pool = components::LitPool::new();

        ops.local_label("backward1");

        // entries are packed, but each is aligned to its own size
        assert_eq!(pool.push_u8(0x12), 0);
        assert_eq!(pool.push_u8(0x34), 1);
        assert_eq!(pool.push_u8(0x56), 2);
        assert_eq!(pool.push_u16(0x789A), 4);
        assert_eq!(pool.push_u32(0xBCDE_F012), 8);
        assert_eq!(pool.push_u64(0x3456_789A_BCDE_F012), 16);
        assert_eq!(pool.push_forward("forward1", RelocationSize::Byte), 24);
        pool.align(4, 0xCC);
        assert_eq!(pool.push_global("global1", RelocationSize::Word), 28);
        assert_eq!(pool.push_dynamic(dynamic1, RelocationSize::DWord), 32);
        assert_eq!(pool.push_backward("backward1", RelocationSize::QWord), 40);

        pool.emit(&mut ops);
        assert_eq!(ops.offset().0, 48);

        ops.local_label("forward1");
        ops.global_label("global1");
        ops.dynamic_label(dynamic1);

        assert_eq!(ops.commit(), Ok(()));
        let buf = ops.finalize().unwrap();

        assert_eq!(&*buf, &[
            0x12, 0x34, 0x56, 0x00, 0x9A, 0x78, 0x00, 0x00,
            0x12, 0xF0, 0xDE, 0xBC, 0x00, 0x00, 0x00, 0x00,
            0x12, 0xF0, 0xDE, 0xBC, 0x9A, 0x78, 0x56, 0x34,
            24  , 0xCC, 0xCC, 0xCC, 20  , 0   , 0x00, 0x00,
            16  , 0   , 0   , 0   , 0x00, 0x00, 0x00, 0x00,
            0xD8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFFu8,
        ] as &[u8]);
    }
}