1//! Shared utility layer for all architecture-specific compilers
23use crate::compiler::Executable;
4use crate::CompilerError;
5use arrayvec::ArrayVec;
6use dynasmrt::mmap::MutableBuffer;
7use dynasmrt::{
8 components::PatchLoc, relocations::Relocation, AssemblyOffset, DynamicLabel, DynasmApi,
9 DynasmLabelApi,
10};
11use std::marker::PhantomData;
1213pub(crate) use dynasmrt::mmap::ExecutableBuffer;
/// Our own simple replacement for [`dynasmrt::Assembler`]
///
/// The default assembler in [`dynasmrt`] has a ton of features we don't need,
/// but more importantly it will panic if it can't make its memory region
/// executable. This is a no-go for us, since there is a fallback available.
///
/// Our needs are simple: a single un-named label, no relocations, and
/// no modification after finalization. However, we do need to handle runtime
/// mmap errors thoroughly.
///
/// Type parameters: `R` is the architecture's relocation flavor (relocations
/// are applied immediately, never stored) and `S` is the fixed capacity of
/// the assembly buffer, in bytes.
#[derive(Debug, Clone)]
pub(crate) struct Assembler<R: Relocation, const S: usize> {
    /// Temporary fixed capacity buffer for assembling code; copied into a
    /// fresh executable mapping by `finalize()`
    buffer: ArrayVec<u8, S>,
    /// Address of the last "target" label, if any; written by
    /// `local_label()` and consumed by `backward_relocation()`
    target: Option<AssemblyOffset>,
    /// Relocations are applied immediately and not stored, so `R` only
    /// appears here as a marker type.
    phantom: PhantomData<R>,
}
3334impl<R: Relocation, const S: usize> Assembler<R, S> {
35/// Return the entry point as an [`AssemblyOffset`].
36#[inline(always)]
37pub(crate) fn entry() -> AssemblyOffset {
38 AssemblyOffset(0)
39 }
4041/// Size of the code stored so far, in bytes
42#[inline(always)]
43pub(crate) fn len(&self) -> usize {
44self.buffer.len()
45 }
4647/// Make a new assembler with a temporary buffer but no executable buffer.
48#[inline(always)]
49pub(crate) fn new() -> Self {
50Self {
51 buffer: ArrayVec::new(),
52 target: None,
53 phantom: PhantomData,
54 }
55 }
5657/// Return a new [`Executable`] with the code that's been written so far.
58 ///
59 /// This may fail if we can't allocate some memory, fill it, and mark
60 /// it as executable. For example, a Linux platform with policy to restrict
61 /// `mprotect` will show runtime errors at this point.
62 ///
63 /// Performance note: Semantically it makes more sense to consume the
64 /// `Assembler` instance here, passing it by value. This can result in a
65 /// memcpy that doesn't optimize out, which is a dramatic increase to
66 /// the memory bandwidth required in compilation. We avoid that extra
67 /// copy by only passing a reference.
68#[inline(always)]
69pub(crate) fn finalize(&self) -> Result<Executable, CompilerError> {
70// We never execute code from the buffer until it's complete, and we use
71 // a freshly mmap'ed buffer for each program. Because of this, we don't
72 // need to explicitly clear the icache even on platforms that would
73 // normally want this. If we reuse buffers in the future, this will need
74 // architecture-specific support for icache clearing when a new program
75 // is finalized into a buffer we previously ran.
76let mut mut_buf = MutableBuffer::new(self.buffer.len())?;
77 mut_buf.set_len(self.buffer.len());
78 mut_buf[..].copy_from_slice(&self.buffer);
79Ok(Executable {
80 buffer: mut_buf.make_exec()?,
81 })
82 }
83}
8485impl std::fmt::Debug for Executable {
86/// Debug an [`Executable`] by hex-dumping its contents.
87fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
88write!(
89 f,
90"Executable[{}; {} ]",
91 std::env::consts::ARCH,
92 hex::encode(&self.buffer[..])
93 )
94 }
95}
9697// Reluctantly implement just enough of [`DynasmLabelApi`] for our single backward label.
98impl<R: Relocation, const S: usize> DynasmLabelApi for Assembler<R, S> {
99type Relocation = R;
100101#[inline(always)]
102fn local_label(&mut self, name: &'static str) {
103debug_assert_eq!(name, "target");
104self.target = Some(self.offset());
105 }
106107#[inline(always)]
108fn backward_relocation(
109&mut self,
110 name: &'static str,
111 target_offset: isize,
112 field_offset: u8,
113 ref_offset: u8,
114 kind: R,
115 ) {
116debug_assert_eq!(name, "target");
117let target = self
118.target
119 .expect("generated programs always have a target before branch");
120// Apply the relocation immediately without storing it.
121let loc = PatchLoc::new(self.offset(), target_offset, field_offset, ref_offset, kind);
122let buf = &mut self.buffer[loc.range(0)];
123 loc.patch(buf, 0, target.0)
124 .expect("program relocations are always in range");
125 }
126127fn global_label(&mut self, _name: &'static str) {
128unreachable!();
129 }
130131fn dynamic_label(&mut self, _id: DynamicLabel) {
132unreachable!();
133 }
134135fn bare_relocation(&mut self, _target: usize, _field_offset: u8, _ref_offset: u8, _kind: R) {
136unreachable!();
137 }
138139fn global_relocation(
140&mut self,
141 _name: &'static str,
142 _target_offset: isize,
143 _field_offset: u8,
144 _ref_offset: u8,
145 _kind: R,
146 ) {
147unreachable!();
148 }
149150fn dynamic_relocation(
151&mut self,
152 _id: DynamicLabel,
153 _target_offset: isize,
154 _field_offset: u8,
155 _ref_offset: u8,
156 _kind: R,
157 ) {
158unreachable!();
159 }
160161fn forward_relocation(
162&mut self,
163 _name: &'static str,
164 _target_offset: isize,
165 _field_offset: u8,
166 _ref_offset: u8,
167 _kind: R,
168 ) {
169unreachable!();
170 }
171}
172173impl<R: Relocation, const S: usize> Extend<u8> for Assembler<R, S> {
174#[inline(always)]
175fn extend<T: IntoIterator<Item = u8>>(&mut self, iter: T) {
176self.buffer.extend(iter);
177 }
178}
179180impl<'a, R: Relocation, const S: usize> Extend<&'a u8> for Assembler<R, S> {
181#[inline(always)]
182fn extend<T: IntoIterator<Item = &'a u8>>(&mut self, iter: T) {
183for byte in iter {
184self.buffer.push(*byte);
185 }
186 }
187}
188189impl<R: Relocation, const S: usize> DynasmApi for Assembler<R, S> {
190#[inline(always)]
191fn offset(&self) -> AssemblyOffset {
192 AssemblyOffset(self.buffer.len())
193 }
194195#[inline(always)]
196fn push(&mut self, byte: u8) {
197self.buffer.push(byte);
198 }
199200fn align(&mut self, _alignment: usize, _with: u8) {
201unreachable!();
202 }
203}