osom_asm_x86_64/assembler/implementation/x86_64_assembler.rs

#![allow(clippy::cast_ptr_alignment, clippy::used_underscore_items)]

use std::collections::HashMap;

use osom_asm_common::InlineVec;
use osom_encoders_x86_64::models::EncodedX86_64Instruction;

use crate::assembler::EmitError;
use crate::assembler::implementation::fragment::RelaxationVariant;
use crate::models::Label;

use super::fragment::{Fragment, FragmentOrderId};
use super::macros::{fragment_at_index, fragment_at_index_mut};

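/// A position inside the assembler's byte buffer, expressed as the id of the
/// owning fragment plus a byte offset within that fragment's data.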
#[derive(Debug, Clone)]
#[must_use]
pub(super) struct FragmentRelativePosition {
    pub fragment_id: FragmentOrderId,
    pub in_fragment_offset: u32,
}

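/// An already emitted instruction whose 32-bit immediate still needs to be
/// patched once the target label is resolved. `imm32_offset` is the offset of
/// the immediate within the encoded instruction of `instruction_length` bytes.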
#[derive(Debug, Clone)]
#[must_use]
pub(super) struct PatchableImm32Instruction {
    pub instruction_position: FragmentRelativePosition,
    pub instruction_length: u8,
    pub imm32_offset: u8,
}

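/// Incremental x86-64 assembler. Emitted content is kept in a single flat
/// `Vec<u8>` of consecutive fragments; `last_fragment_offset` is the byte
/// offset of the header of the fragment currently being appended to.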
#[derive(Clone)]
#[must_use]
pub struct X86_64Assembler {
    pub(super) label_offsets: HashMap<Label, FragmentRelativePosition>,
    pub(super) patchable_addresses: HashMap<Label, InlineVec<PatchableImm32Instruction, 5>>,
    pub(super) public_labels: Vec<Label>,
    pub(super) fragments: Vec<u8>,
    pub(super) last_fragment_offset: u32,
    pub(super) fragments_count: u32,
    pub(super) with_relaxation: bool,
}

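// Size and alignment of the `Fragment` header as stored in the flat byte buffer.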
const FRAGMENT_SIZE: u32 = size_of::<Fragment>() as u32;
const FRAGMENT_ALIGNMENT: u32 = align_of::<Fragment>() as u32;

impl X86_64Assembler {
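    /// Creates an empty assembler seeded with a single empty `Bytes` fragment.
    /// `with_relaxation` selects the `RelaxationVariant` returned by
    /// `_relaxation_variant`: `Short` when enabled, `Long` otherwise.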
    #[inline(always)]
    pub fn new(with_relaxation: bool) -> Self {
        let mut fragments = Vec::<u8>::with_capacity(1 << 12);
        let initial_fragment = Fragment::Bytes {
            data_length: 0,
            capacity: FRAGMENT_SIZE,
        };
        fragments.extend_from_slice(initial_fragment.slice_of_header());

        Self {
            label_offsets: HashMap::with_capacity(16),
            patchable_addresses: HashMap::with_capacity(16),
            public_labels: Vec::with_capacity(4),
            fragments,
            last_fragment_offset: 0,
            fragments_count: 1,
            with_relaxation,
        }
    }

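    /// Appends raw bytes to the current `Bytes` fragment, creating a fresh
    /// `Bytes` fragment first if the current fragment is of a different kind.
    /// Updates the fragment's `data_length` and its alignment-rounded `capacity`.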
    pub(super) fn _write_bytes_internal(&mut self, bytes: &[u8]) {
        if bytes.is_empty() {
            return;
        }
        let bytes_len = bytes.len() as u32;
        let current_fragment = fragment_at_index_mut!(self, self.last_fragment_offset);
        if let Fragment::Bytes { data_length, capacity } = current_fragment {
            *data_length += bytes_len;
            *capacity = (((*data_length + FRAGMENT_SIZE) / FRAGMENT_ALIGNMENT) + 1) * FRAGMENT_ALIGNMENT;
        } else {
            let new_fragment = Fragment::Bytes {
                data_length: 0,
                capacity: FRAGMENT_SIZE,
            };
            self._push_new_fragment(new_fragment);
            let Fragment::Bytes { data_length, capacity } = fragment_at_index_mut!(self, self.last_fragment_offset)
            else {
                panic!("New fragment is not a bytes fragment.");
            };
            *data_length += bytes_len;
            *capacity = (((*data_length + FRAGMENT_SIZE) / FRAGMENT_ALIGNMENT) + 1) * FRAGMENT_ALIGNMENT;
        }

        self.fragments.extend_from_slice(bytes);
    }

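    /// Returns the position at which the next byte would be emitted: the
    /// current fragment's id and the end of its data (0 for non-`Bytes` fragments).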
    pub(super) fn _current_position(&self) -> FragmentRelativePosition {
        let current_fragment = fragment_at_index!(self, self.last_fragment_offset);
        let offset = match current_fragment {
            Fragment::Bytes { data_length, .. } => *data_length,
            _ => 0,
        };

        let fragment_order_id = FragmentOrderId::from_index(self.last_fragment_offset);

        FragmentRelativePosition {
            fragment_id: fragment_order_id,
            in_fragment_offset: offset,
        }
    }

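    /// Appends a new fragment header to the buffer. If the current fragment is
    /// a `Bytes` fragment, it is first zero-padded up to its declared capacity
    /// so that the new header starts at a `Fragment`-aligned offset.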
    #[allow(clippy::needless_pass_by_value)]
    pub(super) fn _push_new_fragment(&mut self, fragment: Fragment) {
        let current_fragment = fragment_at_index!(self, self.last_fragment_offset);
        let padding = match current_fragment {
            Fragment::Bytes { data_length, capacity } => *capacity - *data_length - FRAGMENT_SIZE,
            _ => 0,
        };
        debug_assert!(
            padding <= FRAGMENT_ALIGNMENT,
            "Padding is too large, expected at most {FRAGMENT_ALIGNMENT} bytes, got {padding}"
        );
        if padding > 0 {
            let buffer = [0; FRAGMENT_ALIGNMENT as usize];
            let slice = &buffer[..padding as usize];
            self.fragments.extend_from_slice(slice);
        }
        self.last_fragment_offset = self.fragments.len() as u32;
        self.fragments.extend_from_slice(fragment.slice_of_header());
        self.fragments_count += 1;
    }

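    /// Returns `RelaxationVariant::Short` when relaxation is enabled and
    /// `RelaxationVariant::Long` otherwise.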
    #[inline(always)]
    pub(super) const fn _relaxation_variant(&self) -> RelaxationVariant {
        if self.with_relaxation {
            RelaxationVariant::Short
        } else {
            RelaxationVariant::Long
        }
    }

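    /// Records `label` at the current position. Fails with
    /// `EmitError::LabelAlreadyDefined` if the label was defined before.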
    pub(super) fn _insert_label(&mut self, label: Label) -> Result<(), EmitError> {
        if self.label_offsets.contains_key(&label) {
            return Err(EmitError::LabelAlreadyDefined(label));
        }
        let label_offset = self._current_position();
        self.label_offsets.insert(label, label_offset);
        Ok(())
    }

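    /// Registers an emitted instruction whose imm32 has to be patched once
    /// `label` is resolved.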
    #[inline(always)]
    pub(super) fn _push_patchable_instruction(&mut self, label: Label, patch_info: PatchableImm32Instruction) {
        self.patchable_addresses
            .entry(label)
            .or_insert_with(InlineVec::new)
            .push(patch_info);
    }
}

impl Default for X86_64Assembler {
    fn default() -> Self {
        Self::new(true)
    }
}