diff --git a/src/executor.rs b/src/executor.rs
index d9fa02406..5be9df9e8 100644
--- a/src/executor.rs
+++ b/src/executor.rs
@@ -6,7 +6,7 @@
 pub use self::{aot::AotNativeExecutor, contract::AotContractExecutor, jit::JitNativeExecutor};
 use crate::{
     arch::{AbiArgument, ValueWithInfoWrapper},
-    error::{panic::ToNativeAssertError, Error},
+    error::{panic::ToNativeAssertError, Error, Result},
     execution_result::{BuiltinStats, ExecutionResult},
     native_panic,
     runtime::BUILTIN_COSTS,
@@ -79,7 +79,7 @@ fn invoke_dynamic(
         Option,
         Option,
     ),
-) -> Result<ExecutionResult, Error> {
+) -> Result<ExecutionResult> {
     tracing::info!("Invoking function with signature: {function_signature:?}.");
     let arena = Bump::new();
     let mut invoke_data = Vec::<u8>::new();
@@ -106,7 +106,7 @@ fn invoke_dynamic(
 
             Ok((!(type_info.is_builtin() && is_zst)).then_some(id)).transpose()
         })
-        .collect::<Result<Vec<_>, _>>()?
+        .collect::<Result<Vec<_>>>()?
         .into_iter()
         .peekable();
 
@@ -120,7 +120,7 @@ fn invoke_dynamic(
     {
         let layout = ret_types_iter.try_fold(Layout::new::<()>(), |layout, id| {
            let type_info = registry.get_type(id)?;
-            Result::<_, Error>::Ok(layout.extend(type_info.layout(registry)?)?.0)
+            Result::Ok(layout.extend(type_info.layout(registry)?)?.0)
        })?;
 
        let return_ptr = arena.alloc_layout(layout).cast::<()>();
@@ -223,10 +223,14 @@ fn invoke_dynamic(
            ret_registers.as_mut_ptr(),
        );
    };
-    #[cfg(feature = "with-segfault-catcher")]
-    crate::utils::safe_runner::run_safely(run_trampoline).map_err(Error::SafeRunner)?;
-    #[cfg(not(feature = "with-segfault-catcher"))]
-    run_trampoline();
+    crate::utils::allocator::run_with_allocator(|| {
+        #[cfg(feature = "with-segfault-catcher")]
+        crate::utils::safe_runner::run_safely(run_trampoline).map_err(Error::SafeRunner)?;
+        #[cfg(not(feature = "with-segfault-catcher"))]
+        run_trampoline();
+
+        Result::Ok(())
+    })?;
 
     // Restore the previous syscall handler and builtin costs.
 #[cfg(feature = "with-cheatcode")]
@@ -387,7 +391,7 @@ fn parse_result(
     mut return_ptr: Option<NonNull<()>>,
     #[cfg(target_arch = "x86_64")] mut ret_registers: [u64; 2],
     #[cfg(target_arch = "aarch64")] mut ret_registers: [u64; 4],
-) -> Result<Value, Error> {
+) -> Result<Value> {
     let type_info = registry.get_type(type_id)?;
 
     // Align the pointer to the actual return value.
diff --git a/src/executor/contract.rs b/src/executor/contract.rs
index be12c6978..3dcbc1abb 100644
--- a/src/executor/contract.rs
+++ b/src/executor/contract.rs
@@ -432,10 +432,14 @@ impl AotContractExecutor {
                ret_registers.as_mut_ptr(),
            );
        };
-        #[cfg(feature = "with-segfault-catcher")]
-        crate::utils::safe_runner::run_safely(run_trampoline).map_err(Error::SafeRunner)?;
-        #[cfg(not(feature = "with-segfault-catcher"))]
-        run_trampoline();
+        crate::utils::allocator::run_with_allocator(|| {
+            #[cfg(feature = "with-segfault-catcher")]
+            crate::utils::safe_runner::run_safely(run_trampoline).map_err(Error::SafeRunner)?;
+            #[cfg(not(feature = "with-segfault-catcher"))]
+            run_trampoline();
+
+            Result::Ok(())
+        })?;
 
        // Parse final gas.
        unsafe fn read_value<T>(ptr: &mut NonNull<()>) -> &T {
diff --git a/src/utils.rs b/src/utils.rs
index 86565162a..e671d735f 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -30,6 +30,7 @@ use std::{
 };
 use thiserror::Error;
 
+pub mod allocator;
 mod block_ext;
 pub mod mem_tracing;
 mod program_registry_ext;
diff --git a/src/utils/allocator.rs b/src/utils/allocator.rs
new file mode 100644
index 000000000..4b1b23f6d
--- /dev/null
+++ b/src/utils/allocator.rs
@@ -0,0 +1,109 @@
+use std::{
+    alloc::Layout,
+    cell::UnsafeCell,
+    collections::{hash_map::Entry, HashMap},
+    ptr,
+};
+
+thread_local! {
+    static ALLOCATOR: UnsafeCell<ManagedAllocator> = UnsafeCell::new(ManagedAllocator::default());
+}
+
+// TODO: Replace `crate::utils::libc_free`, `crate::utils::libc_malloc`,
+// `crate::utils::libc_realloc` with our implementation.
+// TODO: Merge runtime crate into library (after #1051).
+// TODO: Register runtime symbols (after #1051).
+
+pub fn register_runtime_symbols(find_symbol: impl Fn(&str) -> Option<*mut ()>) {
+    if let Some(symbol) = find_symbol("cairo_native__alloc") {
+        unsafe {
+            *symbol.cast::<*const ()>() =
+                impl_alloc as *const extern "C" fn(u64, u64) -> *mut () as *const ()
+        }
+    }
+
+    if let Some(symbol) = find_symbol("cairo_native__realloc") {
+        unsafe {
+            *symbol.cast::<*const ()>() =
+                impl_realloc as *const extern "C" fn(*mut (), u64) -> *mut () as *const ()
+        }
+    }
+
+    if let Some(symbol) = find_symbol("cairo_native__free") {
+        unsafe {
+            *symbol.cast::<*const ()>() = impl_free as *const extern "C" fn(*mut ()) as *const ()
+        }
+    }
+}
+
+pub fn run_with_allocator<T>(f: impl FnOnce() -> T) -> T {
+    let prev_allocator =
+        ALLOCATOR.with(|x| unsafe { ptr::replace(x.get(), ManagedAllocator::default()) });
+
+    let result = f();
+
+    ALLOCATOR.with(|x| unsafe { ptr::write(x.get(), prev_allocator) });
+    result
+}
+
+#[derive(Debug, Default)]
+struct ManagedAllocator {
+    allocs: HashMap<*mut u8, Layout>,
+}
+
+impl ManagedAllocator {
+    pub fn alloc(&mut self, layout: Layout) -> *mut u8 {
+        let ptr = unsafe { std::alloc::alloc(layout) };
+        self.allocs.insert(ptr, layout);
+
+        ptr
+    }
+
+    pub fn realloc(&mut self, ptr: *mut u8, new_size: usize) -> *mut u8 {
+        assert!(!ptr.is_null());
+        match self.allocs.entry(ptr) {
+            Entry::Occupied(mut entry) => {
+                let new_ptr = unsafe { std::alloc::realloc(ptr, *entry.get(), new_size) };
+                let new_layout = {
+                    let layout = *entry.get();
+                    Layout::from_size_align(new_size, layout.align()).unwrap()
+                };
+
+                if ptr == new_ptr {
+                    entry.insert(new_layout);
+                } else {
+                    entry.remove();
+                    self.allocs.insert(new_ptr, new_layout);
+                }
+
+                new_ptr
+            }
+            Entry::Vacant(_) => panic!(),
+        }
+    }
+
+    pub fn dealloc(&mut self, ptr: *mut u8) {
+        let layout = self.allocs.remove(&ptr).unwrap();
+        unsafe { std::alloc::dealloc(ptr, layout) }
+    }
+}
+
+impl Drop for ManagedAllocator {
+    fn drop(&mut self) {
+        for (ptr, layout) in self.allocs.drain() {
+            unsafe { std::alloc::dealloc(ptr, layout) }
+        }
+    }
+}
+
+extern "C" fn impl_alloc(size: u64, align: u64) -> *mut () {
+    // let layout = Layout::from_size_align(size, align).unwrap();
+
+    todo!()
+}
+
+extern "C" fn impl_realloc(ptr: *mut (), new_size: u64) -> *mut () {
+    todo!()
+}
+
+extern "C" fn impl_free(ptr: *mut ()) {}