close to working

Lachlan Sneff 2019-02-19 09:58:01 -08:00
parent e381bbd07b
commit 3c7dc200fa
10 changed files with 239 additions and 206 deletions

View File

@@ -2,15 +2,41 @@ use crate::relocation::{ExternalRelocation, TrapSink};
use hashbrown::HashMap;
use wasmer_runtime_core::{
backend::sys::Memory,
backend::{
sys::Memory,
CacheGen,
},
cache::{Cache, Error},
module::ModuleInfo,
module::{ModuleInfo, ModuleInner},
structures::Map,
types::{LocalFuncIndex, SigIndex},
};
use std::{
sync::Arc,
cell::UnsafeCell,
};
use serde_bench::{deserialize, serialize};
pub struct CacheGenerator {
backend_cache: BackendCache,
memory: Arc<Memory>,
}
impl CacheGenerator {
pub fn new(backend_cache: BackendCache, memory: Arc<Memory>) -> Self {
Self { backend_cache, memory }
}
}
impl CacheGen for CacheGenerator {
fn generate_cache(&self, module: &ModuleInner) -> Result<(Box<ModuleInfo>, Box<[u8]>, Arc<Memory>), Error> {
let info = Box::new(module.info.clone());
Err(Error::Unknown("".to_string()))
}
}
#[derive(Serialize, Deserialize)]
pub struct TrampolineCache {
#[serde(with = "serde_bytes")]
@@ -22,7 +48,7 @@ pub struct TrampolineCache {
pub struct BackendCache {
pub external_relocs: Map<LocalFuncIndex, Box<[ExternalRelocation]>>,
pub offsets: Map<LocalFuncIndex, usize>,
pub trap_sink: TrapSink,
pub trap_sink: Arc<TrapSink>,
pub trampolines: TrampolineCache,
}
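For reference, the new CacheGenerator above is still a stub: generate_cache clones the ModuleInfo and then returns Err(Error::Unknown(...)). Below is a minimal sketch of the shape it appears to be heading toward. All types are simplified stand-ins rather than the real wasmer definitions, and the byte encoding is hypothetical, standing in for the serde_bench serialization this file imports.

use std::sync::Arc;

#[derive(Clone, Debug)]
struct ModuleInfo { name: String }          // stand-in for module::ModuleInfo
struct Memory { bytes: Vec<u8> }            // stand-in for backend::sys::Memory
struct BackendCache { offsets: Vec<usize> } // stand-in; the real one also carries relocs, trap sink, trampolines

struct CacheGenerator {
    backend_cache: BackendCache,
    memory: Arc<Memory>,
}

impl CacheGenerator {
    fn new(backend_cache: BackendCache, memory: Arc<Memory>) -> Self {
        Self { backend_cache, memory }
    }

    // Presumed output: a cloned ModuleInfo, the serialized BackendCache, and the
    // shared executable Memory, matching the new CacheGen trait signature.
    fn generate_cache(&self, info: &ModuleInfo) -> (Box<ModuleInfo>, Box<[u8]>, Arc<Memory>) {
        let info = Box::new(info.clone());
        // Hypothetical encoding; the real code would go through serde_bench.
        let mut blob = Vec::new();
        for offset in &self.backend_cache.offsets {
            blob.extend_from_slice(&(*offset as u64).to_le_bytes());
        }
        (info, blob.into_boxed_slice(), Arc::clone(&self.memory))
    }
}

fn main() {
    let cache_gen = CacheGenerator::new(
        BackendCache { offsets: vec![0, 64, 128] },
        Arc::new(Memory { bytes: vec![0; 4096] }),
    );
    let (info, blob, code) = cache_gen.generate_cache(&ModuleInfo { name: "demo".into() });
    println!("{:?}: {} cache bytes, {} bytes of code", info, blob.len(), code.bytes.len());
}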

View File

@@ -62,36 +62,36 @@ impl Compiler for CraneliftCompiler {
module::Module::from_cache(cache)
}
#[cfg(feature = "cache")]
fn compile_to_backend_cache_data(
&self,
wasm: &[u8],
_: Token,
) -> CompileResult<(Box<ModuleInfo>, Vec<u8>, Memory)> {
validate(wasm)?;
// #[cfg(feature = "cache")]
// fn compile_to_backend_cache_data(
// &self,
// wasm: &[u8],
// _: Token,
// ) -> CompileResult<(Box<ModuleInfo>, Vec<u8>, Memory)> {
// validate(wasm)?;
let isa = get_isa();
// let isa = get_isa();
let mut module = module::Module::new(wasm);
let module_env = module_env::ModuleEnv::new(&mut module, &*isa);
// let mut module = module::Module::new(wasm);
// let module_env = module_env::ModuleEnv::new(&mut module, &*isa);
let func_bodies = module_env.translate(wasm)?;
// let func_bodies = module_env.translate(wasm)?;
let (info, backend_cache, compiled_code) = module
.compile_to_backend_cache(&*isa, func_bodies)
.map_err(|e| CompileError::InternalError {
msg: format!("{:?}", e),
})?;
// let (info, backend_cache, compiled_code) = module
// .compile_to_backend_cache(&*isa, func_bodies)
// .map_err(|e| CompileError::InternalError {
// msg: format!("{:?}", e),
// })?;
let buffer =
backend_cache
.into_backend_data()
.map_err(|e| CompileError::InternalError {
msg: format!("{:?}", e),
})?;
// let buffer =
// backend_cache
// .into_backend_data()
// .map_err(|e| CompileError::InternalError {
// msg: format!("{:?}", e),
// })?;
Ok((Box::new(info), buffer, compiled_code))
}
// Ok((Box::new(info), buffer, compiled_code))
// }
}
fn get_isa() -> Box<isa::TargetIsa> {

View File

@@ -1,11 +1,12 @@
#[cfg(feature = "cache")]
use crate::cache::BackendCache;
use crate::cache::{BackendCache, CacheGenerator};
use crate::{resolver::FuncResolverBuilder, signal::Caller, trampoline::Trampolines};
use cranelift_codegen::{ir, isa};
use cranelift_entity::EntityRef;
use cranelift_wasm;
use hashbrown::HashMap;
use std::sync::Arc;
#[cfg(feature = "cache")]
use wasmer_runtime_core::{
@@ -67,42 +68,44 @@ impl Module {
) -> CompileResult<ModuleInner> {
let (func_resolver_builder, handler_data) =
FuncResolverBuilder::new(isa, functions, &self.info)?;
let func_resolver =
Box::new(func_resolver_builder.finalize(&self.info.signatures)?);
let trampolines = Arc::new(Trampolines::new(isa, &self.info));
let trampolines = Trampolines::new(isa, &self.info);
let (func_resolver, backend_cache) =
func_resolver_builder.finalize(&self.info.signatures, Arc::clone(&trampolines), handler_data.clone())?;
let protected_caller =
Box::new(Caller::new(&self.info, handler_data, trampolines));
Caller::new(&self.info, handler_data, trampolines);
let cache_gen = Box::new(CacheGenerator::new(backend_cache, Arc::clone(&func_resolver.memory)));
Ok(ModuleInner {
func_resolver,
protected_caller,
func_resolver: Box::new(func_resolver),
protected_caller: Box::new(protected_caller),
cache_gen,
info: self.info,
})
}
#[cfg(feature = "cache")]
pub fn compile_to_backend_cache(
self,
isa: &isa::TargetIsa,
functions: Map<LocalFuncIndex, ir::Function>,
) -> CompileResult<(ModuleInfo, BackendCache, Memory)> {
let (func_resolver_builder, handler_data) =
FuncResolverBuilder::new(isa, functions, &self.info)?;
// #[cfg(feature = "cache")]
// pub fn compile_to_backend_cache(
// self,
// isa: &isa::TargetIsa,
// functions: Map<LocalFuncIndex, ir::Function>,
// ) -> CompileResult<(ModuleInfo, BackendCache, Memory)> {
// let (func_resolver_builder, handler_data) =
// FuncResolverBuilder::new(isa, functions, &self.info)?;
let trampolines = Trampolines::new(isa, &self.info);
// let trampolines = Trampolines::new(isa, &self.info);
let trampoline_cache = trampolines.to_trampoline_cache();
// let trampoline_cache = trampolines.to_trampoline_cache();
let (backend_cache, compiled_code) =
func_resolver_builder.to_backend_cache(trampoline_cache, handler_data);
// let (backend_cache, compiled_code) =
// func_resolver_builder.to_backend_cache(trampoline_cache, handler_data);
Ok((self.info, backend_cache, compiled_code))
}
// Ok((self.info, backend_cache, compiled_code))
// }
#[cfg(feature = "cache")]
pub fn from_cache(cache: Cache) -> Result<ModuleInner, CacheError> {
@@ -111,17 +114,19 @@ impl Module {
let (func_resolver_builder, trampolines, handler_data) =
FuncResolverBuilder::new_from_backend_cache(backend_cache, compiled_code, &info)?;
let func_resolver = Box::new(
let (func_resolver, backend_cache) =
func_resolver_builder
.finalize(&info.signatures)
.map_err(|e| CacheError::Unknown(format!("{:?}", e)))?,
);
.finalize(&info.signatures, Arc::clone(&trampolines), handler_data.clone())
.map_err(|e| CacheError::Unknown(format!("{:?}", e)))?;
let protected_caller = Box::new(Caller::new(&info, handler_data, trampolines));
let protected_caller = Caller::new(&info, handler_data, trampolines);
let cache_gen = Box::new(CacheGenerator::new(backend_cache, Arc::clone(&func_resolver.memory)));
Ok(ModuleInner {
func_resolver,
protected_caller,
func_resolver: Box::new(func_resolver),
protected_caller: Box::new(protected_caller),
cache_gen,
info,
})
}
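The net effect of this hunk is an ownership change: the trampolines now live in an Arc so they can feed both finalize() (which folds them into the BackendCache) and the protected Caller, and the CacheGenerator is handed an Arc of the resolver's code memory. A compile-only sketch of that wiring follows, with unit stand-in types; the real finalize() also takes the signature map and returns a CompileResult, which is omitted here.

use std::sync::Arc;

struct Trampolines;                 // stand-in
struct HandlerData;                 // stand-in
#[derive(Default)]
struct BackendCache;                // stand-in for the serializable cache
struct Memory;                      // stand-in for backend::sys::Memory
struct FuncResolver { memory: Arc<Memory> }
struct Caller { _trampolines: Arc<Trampolines> }
struct CacheGenerator { _cache: BackendCache, _memory: Arc<Memory> }

struct FuncResolverBuilder;         // stand-in
impl FuncResolverBuilder {
    // Rough shape of the new finalize(): consumes the builder, borrows the shared
    // trampolines, and returns the resolver together with the backend cache.
    fn finalize(self, trampolines: Arc<Trampolines>, _handler_data: HandlerData)
        -> (FuncResolver, BackendCache)
    {
        let _ = trampolines; // the real code snapshots these into a TrampolineCache
        (FuncResolver { memory: Arc::new(Memory) }, BackendCache::default())
    }
}

fn compile(builder: FuncResolverBuilder, handler_data: HandlerData)
    -> (FuncResolver, Caller, CacheGenerator)
{
    let trampolines = Arc::new(Trampolines);
    let (func_resolver, backend_cache) =
        builder.finalize(Arc::clone(&trampolines), handler_data);
    let protected_caller = Caller { _trampolines: trampolines };
    let cache_gen = CacheGenerator {
        _cache: backend_cache,
        _memory: Arc::clone(&func_resolver.memory),
    };
    (func_resolver, protected_caller, cache_gen)
}

fn main() {
    let (resolver, _caller, _cache_gen) = compile(FuncResolverBuilder, HandlerData);
    println!("code memory owners: {}", Arc::strong_count(&resolver.memory));
}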

View File

@@ -17,6 +17,7 @@ use cranelift_codegen::{ir, isa, Context};
use std::{
mem,
ptr::{write_unaligned, NonNull},
cell::UnsafeCell,
sync::Arc,
};
#[cfg(feature = "cache")]
@@ -56,7 +57,7 @@ impl FuncResolverBuilder {
backend_cache: BackendCache,
mut code: Memory,
info: &ModuleInfo,
) -> Result<(Self, Trampolines, HandlerData), CacheError> {
) -> Result<(Self, Arc<Trampolines>, HandlerData), CacheError> {
unsafe {
code.protect(.., Protect::ReadWrite)
.map_err(|e| CacheError::Unknown(e.to_string()))?;
@@ -69,35 +70,17 @@ impl FuncResolverBuilder {
Self {
resolver: FuncResolver {
map: backend_cache.offsets,
memory: code,
memory: Arc::new(UnsafeCell::new(code)),
},
local_relocs: Map::new(),
external_relocs: backend_cache.external_relocs,
import_len: info.imported_functions.len(),
},
Trampolines::from_trampoline_cache(backend_cache.trampolines),
Arc::new(Trampolines::from_trampoline_cache(backend_cache.trampolines)),
handler_data,
))
}
#[cfg(feature = "cache")]
pub fn to_backend_cache(
mut self,
trampolines: TrampolineCache,
handler_data: HandlerData,
) -> (BackendCache, Memory) {
self.relocate_locals();
(
BackendCache {
external_relocs: self.external_relocs,
offsets: self.resolver.map,
trap_sink: handler_data.trap_data,
trampolines,
},
self.resolver.memory,
)
}
pub fn new(
isa: &isa::TargetIsa,
function_bodies: Map<LocalFuncIndex, ir::Function>,
@@ -169,10 +152,10 @@ impl FuncResolverBuilder {
previous_end = new_end;
}
let handler_data = HandlerData::new(trap_sink, memory.as_ptr() as _, memory.size());
let handler_data = HandlerData::new(Arc::new(trap_sink), memory.as_ptr() as _, memory.size());
let mut func_resolver_builder = Self {
resolver: FuncResolver { map, memory },
resolver: FuncResolver { map, memory: Arc::new(UnsafeCell::new(memory)) },
local_relocs,
external_relocs,
import_len: info.imported_functions.len(),
@@ -207,128 +190,143 @@ impl FuncResolverBuilder {
}
pub fn finalize(
mut self,
self,
signatures: &SliceMap<SigIndex, Arc<FuncSig>>,
) -> CompileResult<FuncResolver> {
for (index, relocs) in self.external_relocs.iter() {
for ref reloc in relocs.iter() {
let target_func_address: isize = match reloc.target {
RelocationType::LibCall(libcall) => match libcall {
LibCall::CeilF32 => libcalls::ceilf32 as isize,
LibCall::FloorF32 => libcalls::floorf32 as isize,
LibCall::TruncF32 => libcalls::truncf32 as isize,
LibCall::NearestF32 => libcalls::nearbyintf32 as isize,
LibCall::CeilF64 => libcalls::ceilf64 as isize,
LibCall::FloorF64 => libcalls::floorf64 as isize,
LibCall::TruncF64 => libcalls::truncf64 as isize,
LibCall::NearestF64 => libcalls::nearbyintf64 as isize,
#[cfg(all(target_pointer_width = "64", target_os = "windows"))]
LibCall::Probestack => __chkstk as isize,
#[cfg(not(target_os = "windows"))]
LibCall::Probestack => __rust_probestack as isize,
},
RelocationType::Intrinsic(ref name) => match name.as_str() {
"i32print" => i32_print as isize,
"i64print" => i64_print as isize,
"f32print" => f32_print as isize,
"f64print" => f64_print as isize,
"strtdbug" => start_debug as isize,
"enddbug" => end_debug as isize,
_ => Err(CompileError::InternalError {
msg: format!("unexpected intrinsic: {}", name),
})?,
},
RelocationType::VmCall(vmcall) => match vmcall {
VmCall::Local(kind) => match kind {
VmCallKind::StaticMemoryGrow => vmcalls::local_static_memory_grow as _,
VmCallKind::StaticMemorySize => vmcalls::local_static_memory_size as _,
trampolines: Arc<Trampolines>,
handler_data: HandlerData,
) -> CompileResult<(FuncResolver, BackendCache)> {
{
let mut memory = unsafe { (*self.resolver.memory.get()) };
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::local_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::local_dynamic_memory_size as _
}
for (index, relocs) in self.external_relocs.iter() {
for ref reloc in relocs.iter() {
let target_func_address: isize = match reloc.target {
RelocationType::LibCall(libcall) => match libcall {
LibCall::CeilF32 => libcalls::ceilf32 as isize,
LibCall::FloorF32 => libcalls::floorf32 as isize,
LibCall::TruncF32 => libcalls::truncf32 as isize,
LibCall::NearestF32 => libcalls::nearbyintf32 as isize,
LibCall::CeilF64 => libcalls::ceilf64 as isize,
LibCall::FloorF64 => libcalls::floorf64 as isize,
LibCall::TruncF64 => libcalls::truncf64 as isize,
LibCall::NearestF64 => libcalls::nearbyintf64 as isize,
#[cfg(all(target_pointer_width = "64", target_os = "windows"))]
LibCall::Probestack => __chkstk as isize,
#[cfg(not(target_os = "windows"))]
LibCall::Probestack => __rust_probestack as isize,
},
VmCall::Import(kind) => match kind {
VmCallKind::StaticMemoryGrow => {
vmcalls::imported_static_memory_grow as _
}
VmCallKind::StaticMemorySize => {
vmcalls::imported_static_memory_size as _
}
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::imported_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::imported_dynamic_memory_size as _
}
RelocationType::Intrinsic(ref name) => match name.as_str() {
"i32print" => i32_print as isize,
"i64print" => i64_print as isize,
"f32print" => f32_print as isize,
"f64print" => f64_print as isize,
"strtdbug" => start_debug as isize,
"enddbug" => end_debug as isize,
_ => Err(CompileError::InternalError {
msg: format!("unexpected intrinsic: {}", name),
})?,
},
RelocationType::VmCall(vmcall) => match vmcall {
VmCall::Local(kind) => match kind {
VmCallKind::StaticMemoryGrow => vmcalls::local_static_memory_grow as _,
VmCallKind::StaticMemorySize => vmcalls::local_static_memory_size as _,
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::local_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::local_dynamic_memory_size as _
}
},
VmCall::Import(kind) => match kind {
VmCallKind::StaticMemoryGrow => {
vmcalls::imported_static_memory_grow as _
}
VmCallKind::StaticMemorySize => {
vmcalls::imported_static_memory_size as _
}
VmCallKind::SharedStaticMemoryGrow => unimplemented!(),
VmCallKind::SharedStaticMemorySize => unimplemented!(),
VmCallKind::DynamicMemoryGrow => {
vmcalls::imported_dynamic_memory_grow as _
}
VmCallKind::DynamicMemorySize => {
vmcalls::imported_dynamic_memory_size as _
}
},
},
RelocationType::Signature(sig_index) => {
let sig_index =
SigRegistry.lookup_sig_index(Arc::clone(&signatures[sig_index]));
sig_index.index() as _
}
};
// We need the address of the current function
// because some of these calls are relative.
let func_addr = self.resolver.lookup(index).unwrap().as_ptr();
// Determine relocation type and apply relocation.
match reloc.reloc {
Reloc::Abs8 => {
let ptr_to_write = (target_func_address as u64)
.checked_add(reloc.addend as u64)
.unwrap();
let empty_space_offset = self.resolver.map[index] + reloc.offset as usize;
let ptr_slice = unsafe {
&mut memory.as_slice_mut()
[empty_space_offset..empty_space_offset + 8]
};
LittleEndian::write_u64(ptr_slice, ptr_to_write);
}
Reloc::X86PCRel4 | Reloc::X86CallPCRel4 => unsafe {
let reloc_address = (func_addr as usize) + reloc.offset as usize;
let reloc_delta = target_func_address
.wrapping_sub(reloc_address as isize)
.wrapping_add(reloc.addend as isize);
write_unaligned(reloc_address as *mut u32, reloc_delta as u32);
},
},
RelocationType::Signature(sig_index) => {
let sig_index =
SigRegistry.lookup_sig_index(Arc::clone(&signatures[sig_index]));
sig_index.index() as _
}
};
// We need the address of the current function
// because some of these calls are relative.
let func_addr = self.resolver.lookup(index).unwrap().as_ptr();
// Determine relocation type and apply relocation.
match reloc.reloc {
Reloc::Abs8 => {
let ptr_to_write = (target_func_address as u64)
.checked_add(reloc.addend as u64)
.unwrap();
let empty_space_offset = self.resolver.map[index] + reloc.offset as usize;
let ptr_slice = unsafe {
&mut self.resolver.memory.as_slice_mut()
[empty_space_offset..empty_space_offset + 8]
};
LittleEndian::write_u64(ptr_slice, ptr_to_write);
}
Reloc::X86PCRel4 | Reloc::X86CallPCRel4 => unsafe {
let reloc_address = (func_addr as usize) + reloc.offset as usize;
let reloc_delta = target_func_address
.wrapping_sub(reloc_address as isize)
.wrapping_add(reloc.addend as isize);
write_unaligned(reloc_address as *mut u32, reloc_delta as u32);
},
}
}
unsafe {
memory
.protect(.., Protect::ReadExec)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
}
}
unsafe {
self.resolver
.memory
.protect(.., Protect::ReadExec)
.map_err(|e| CompileError::InternalError { msg: e.to_string() })?;
}
let backend_cache = BackendCache {
external_relocs: self.external_relocs.clone(),
offsets: self.resolver.map.clone(),
trap_sink: handler_data.trap_data,
trampolines: trampolines.to_trampoline_cache(),
};
Ok(self.resolver)
Ok((self.resolver, backend_cache))
}
}
unsafe impl Sync for FuncResolver {}
unsafe impl Send for FuncResolver {}
/// Resolves a function index to a function address.
pub struct FuncResolver {
map: Map<LocalFuncIndex, usize>,
memory: Memory,
pub(crate) memory: Arc<UnsafeCell<Memory>>,
}
impl FuncResolver {
fn lookup(&self, local_func_index: LocalFuncIndex) -> Option<NonNull<vm::Func>> {
let offset = *self.map.get(local_func_index)?;
let ptr = unsafe { self.memory.as_ptr().add(offset) };
let ptr = unsafe { (*self.memory.get()).as_ptr().add(offset) };
NonNull::new(ptr).map(|nonnull| nonnull.cast())
}
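FuncResolver.memory is now Arc<UnsafeCell<Memory>>, so finalize() can patch relocations and flip protection in place while a CacheGenerator keeps the same mapping alive; that is also why the manual unsafe impl Send/Sync above is needed, since UnsafeCell is not Sync on its own. A minimal sketch of the lookup-through-the-cell pattern, using a Vec-backed stand-in for the mmap'd sys::Memory; the safety comment mirrors the commit's asserted (not proven) assumption.

use std::cell::UnsafeCell;
use std::ptr::NonNull;
use std::sync::Arc;

struct Memory { bytes: Vec<u8> } // Vec-backed stand-in for the mmap'd sys::Memory

struct FuncResolver {
    offsets: Vec<usize>,
    memory: Arc<UnsafeCell<Memory>>,
}

// Safety claim mirrored from the diff (asserted, not proven): once finalize()
// flips the memory to read-execute it is never written again, so sharing the
// cell between threads is taken to be sound.
unsafe impl Send for FuncResolver {}
unsafe impl Sync for FuncResolver {}

impl FuncResolver {
    fn lookup(&self, index: usize) -> Option<NonNull<u8>> {
        let offset = *self.offsets.get(index)?;
        // Same shape as `(*self.memory.get()).as_ptr().add(offset)` above.
        let ptr = unsafe { (*self.memory.get()).bytes.as_ptr().add(offset) };
        NonNull::new(ptr as *mut u8)
    }
}

fn main() {
    let memory = Arc::new(UnsafeCell::new(Memory { bytes: vec![0xC3; 256] })); // 0xC3 = ret
    let resolver = FuncResolver {
        offsets: vec![0, 16, 32],
        memory: Arc::clone(&memory),
    };
    // A CacheGenerator could hold another Arc::clone(&memory) at this point.
    println!("func 1 at {:?}", resolver.lookup(1));
}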

View File

@@ -40,11 +40,11 @@ impl UserTrapper for Trapper {
pub struct Caller {
func_export_set: HashSet<FuncIndex>,
handler_data: HandlerData,
trampolines: Trampolines,
trampolines: Arc<Trampolines>,
}
impl Caller {
pub fn new(module: &ModuleInfo, handler_data: HandlerData, trampolines: Trampolines) -> Self {
pub fn new(module: &ModuleInfo, handler_data: HandlerData, trampolines: Arc<Trampolines>) -> Self {
let mut func_export_set = HashSet::new();
for export_index in module.exports.values() {
if let ExportIndex::Func(func_index) = export_index {
@@ -187,15 +187,16 @@ fn get_func_from_index(
unsafe impl Send for HandlerData {}
unsafe impl Sync for HandlerData {}
#[derive(Clone)]
pub struct HandlerData {
pub trap_data: TrapSink,
pub trap_data: Arc<TrapSink>,
exec_buffer_ptr: *const c_void,
exec_buffer_size: usize,
}
impl HandlerData {
pub fn new(
trap_data: TrapSink,
trap_data: Arc<TrapSink>,
exec_buffer_ptr: *const c_void,
exec_buffer_size: usize,
) -> Self {
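TrapSink moves behind an Arc here so that HandlerData can be #[derive(Clone)] (module.rs now passes handler_data.clone() into finalize) and the same trap table can also be stored in the BackendCache. A small sketch of that sharing, with stand-in types in place of the real TrapSink and BackendCache:

use std::os::raw::c_void;
use std::sync::Arc;

#[derive(Debug)]
struct TrapSink { traps: Vec<usize> } // stand-in for the real offset -> trap-kind table

#[derive(Clone)]
struct HandlerData {
    trap_data: Arc<TrapSink>,
    exec_buffer_ptr: *const c_void,
    exec_buffer_size: usize,
}

struct BackendCache { trap_sink: Arc<TrapSink> } // matches the new field type in cache.rs

fn main() {
    let sink = Arc::new(TrapSink { traps: vec![0x10, 0x40] });
    let handler_data = HandlerData {
        trap_data: Arc::clone(&sink),
        exec_buffer_ptr: std::ptr::null(),
        exec_buffer_size: 4096,
    };
    // Cloning is now cheap: only the Arc refcounts move, not the trap table.
    let for_finalize = handler_data.clone();
    let cache = BackendCache { trap_sink: for_finalize.trap_data };
    println!(
        "{} trap records shared by {} owners (exec buffer {} bytes)",
        cache.trap_sink.traps.len(),
        Arc::strong_count(&sink),
        handler_data.exec_buffer_size
    );
}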

View File

@@ -58,7 +58,7 @@ impl Trampolines {
}
#[cfg(feature = "cache")]
pub fn to_trampoline_cache(self) -> TrampolineCache {
pub fn to_trampoline_cache(&self) -> TrampolineCache {
let mut code = vec![0; self.memory.size()];
unsafe {
@@ -67,7 +67,7 @@ impl Trampolines {
TrampolineCache {
code,
offsets: self.offsets,
offsets: self.offsets.clone(),
}
}
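to_trampoline_cache() now borrows &self and clones the offsets instead of consuming the trampolines, since they live on inside an Arc for the protected caller and a cache snapshot can be taken at any point. A stand-in sketch of that change:

use std::collections::HashMap;
use std::sync::Arc;

struct Trampolines {
    code: Vec<u8>,                 // stand-in for the executable trampoline Memory
    offsets: HashMap<u32, usize>,  // stand-in for the SigIndex -> offset map
}

struct TrampolineCache {
    code: Vec<u8>,
    offsets: HashMap<u32, usize>,
}

impl Trampolines {
    // Previously `fn to_trampoline_cache(self)`; borrowing and cloning keeps the
    // Arc<Trampolines> usable for the protected caller afterwards.
    fn to_trampoline_cache(&self) -> TrampolineCache {
        TrampolineCache {
            code: self.code.clone(),
            offsets: self.offsets.clone(),
        }
    }
}

fn main() {
    let trampolines = Arc::new(Trampolines {
        code: vec![0x90; 64],
        offsets: HashMap::from([(0u32, 0usize), (1, 32)]),
    });
    let snapshot = trampolines.to_trampoline_cache();
    println!(
        "cached {} bytes and {} offsets; trampolines still live: {}",
        snapshot.code.len(),
        snapshot.offsets.len(),
        Arc::strong_count(&trampolines)
    );
}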

View File

@@ -45,13 +45,6 @@ pub trait Compiler {
#[cfg(feature = "cache")]
unsafe fn from_cache(&self, cache: Cache, _: Token) -> Result<ModuleInner, CacheError>;
#[cfg(feature = "cache")]
fn compile_to_backend_cache_data(
&self,
wasm: &[u8],
_: Token,
) -> CompileResult<(Box<ModuleInfo>, Vec<u8>, Memory)>;
}
/// The functionality exposed by this trait is expected to be used
@@ -101,5 +94,5 @@ pub trait FuncResolver: Send + Sync {
}
pub trait CacheGen: Send + Sync {
fn generate_cache(&self, module: &ModuleInner) -> Result<(Box<ModuleInfo>, Box<[u8]>, Memory), CacheError>;
fn generate_cache(&self, module: &ModuleInner) -> Result<(Box<ModuleInfo>, Box<[u8]>, Arc<Memory>), CacheError>;
}

View File

@@ -90,17 +90,17 @@ pub fn validate(wasm: &[u8]) -> bool {
}
}
#[cfg(feature = "cache")]
pub fn compile_to_cache_with(
wasm: &[u8],
compiler: &dyn backend::Compiler,
) -> CompileResult<Cache> {
let token = backend::Token::generate();
let (info, backend_metadata, compiled_code) =
compiler.compile_to_backend_cache_data(wasm, token)?;
// #[cfg(feature = "cache")]
// pub fn compile_to_cache_with(
// wasm: &[u8],
// compiler: &dyn backend::Compiler,
// ) -> CompileResult<Cache> {
// let token = backend::Token::generate();
// let (info, backend_metadata, compiled_code) =
// compiler.compile_to_backend_cache_data(wasm, token)?;
Ok(Cache::new(wasm, info, backend_metadata, compiled_code))
}
// Ok(Cache::new(wasm, info, backend_metadata, compiled_code))
// }
#[cfg(feature = "cache")]
pub unsafe fn load_cache_with(

View File

@@ -1,5 +1,5 @@
use crate::{
backend::{Backend, FuncResolver, ProtectedCaller},
backend::{Backend, FuncResolver, ProtectedCaller, CacheGen},
error::Result,
import::ImportObject,
structures::{Map, TypedIndex},
@@ -21,10 +21,12 @@ use std::sync::Arc;
pub struct ModuleInner {
pub func_resolver: Box<dyn FuncResolver>,
pub protected_caller: Box<dyn ProtectedCaller>,
pub cache_gen: Box<dyn CacheGen>,
pub info: ModuleInfo,
}
#[derive(Clone)]
#[cfg_attr(feature = "cache", derive(Serialize, Deserialize))]
pub struct ModuleInfo {
// These are strictly local and the type system ensures that.

View File

@@ -494,7 +494,8 @@ mod vm_ctx_tests {
fn generate_module() -> ModuleInner {
use super::Func;
use crate::backend::{Backend, FuncResolver, ProtectedCaller, Token, UserTrapper};
use crate::backend::{Backend, FuncResolver, ProtectedCaller, Token, UserTrapper, CacheGen, sys::Memory};
use crate::cache::Error as CacheError;
use crate::error::RuntimeResult;
use crate::types::{FuncIndex, LocalFuncIndex, Value};
use crate::module::WasmHash;
@@ -526,10 +527,17 @@ mod vm_ctx_tests {
unimplemented!()
}
}
impl CacheGen for Placeholder {
fn generate_cache(&self, module: &ModuleInner) -> Result<(Box<ModuleInfo>, Box<[u8]>, Memory), CacheError> {
unimplemented!()
}
}
ModuleInner {
func_resolver: Box::new(Placeholder),
protected_caller: Box::new(Placeholder),
cache_gen: Box::new(Placeholder),
info: ModuleInfo {
memories: Map::new(),
globals: Map::new(),