Add caching support for singlepass backend.

This commit is contained in:
losfair 2019-11-28 02:49:52 +08:00
parent dfc7163b71
commit cfa0635026
5 changed files with 127 additions and 31 deletions
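For context, here is a rough sketch (not part of this commit) of the user-facing round trip this enables, assuming the FileSystemCache and WasmHash types from wasmer_runtime::cache and that the singlepass backend is the crate's selected default backend; names and error handling are assumptions, not necessarily the exact 0.11 surface.

use wasmer_runtime::cache::{Cache, FileSystemCache, WasmHash};
use wasmer_runtime::{compile, Module};

// Compile a wasm binary, reusing a previously cached artifact when one
// exists for the same bytes. (Sketch only; see the note above.)
fn compile_cached(wasm: &[u8], cache_dir: &str) -> Module {
    // Cache entries are keyed by a hash of the wasm binary.
    let key = WasmHash::generate(wasm);
    // `new` is unsafe because loading a cached artifact trusts the bytes on disk.
    let mut fs_cache = unsafe { FileSystemCache::new(cache_dir).expect("cache dir") };
    if let Ok(module) = fs_cache.load(key) {
        // Cache hit: the machine code and ModuleStateMap come from the
        // serialized cache image instead of a fresh compilation.
        return module;
    }
    // Cache miss: compile with the configured backend and store the artifact.
    let module = compile(wasm).expect("compilation failed");
    fs_cache.store(key, module.clone()).expect("cache store failed");
    module
}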

Cargo.lock (generated)

@@ -1561,12 +1561,15 @@ dependencies = [
name = "wasmer-singlepass-backend"
version = "0.11.0"
dependencies = [
"bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"dynasm 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"dynasmrt 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
"wasmer-runtime-core 0.11.0",
]


@@ -7,11 +7,11 @@ use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};
/// An index to a register
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct RegisterIndex(pub usize);
/// A kind of wasm or constant value
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum WasmAbstractValue {
/// A wasm runtime value
Runtime,
@@ -20,7 +20,7 @@ pub enum WasmAbstractValue {
}
/// A container for the state of a running wasm instance.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MachineState {
/// Stack values.
pub stack_values: Vec<MachineValue>,
@@ -37,7 +37,7 @@ pub struct MachineState {
}
/// A diff of two `MachineState`s.
#[derive(Clone, Debug, Default)]
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct MachineStateDiff {
/// Last.
pub last: Option<usize>,
@@ -63,7 +63,7 @@ pub struct MachineStateDiff {
}
/// A kind of machine value.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum MachineValue {
/// Undefined.
Undefined,
@@ -86,7 +86,7 @@ pub enum MachineValue {
}
/// A map of function states.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FunctionStateMap {
/// Initial.
pub initial: MachineState,
@@ -111,7 +111,7 @@ pub struct FunctionStateMap {
}
/// A kind of suspend offset.
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum SuspendOffset {
/// A loop.
Loop(usize),
@@ -122,7 +122,7 @@ pub enum SuspendOffset {
}
/// Info for an offset.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OffsetInfo {
/// End offset.
pub end_offset: usize, // excluded bound
@@ -133,7 +133,7 @@ pub struct OffsetInfo {
}
/// A map of module state.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ModuleStateMap {
/// Local functions.
pub local_functions: BTreeMap<usize, FunctionStateMap>,
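The Serialize/Deserialize derives above exist so the whole state-map hierarchy (MachineState, MachineValue, FunctionStateMap, ModuleStateMap) can travel inside the serialized cache image. A self-contained sketch of that pattern with simplified stand-in types (the Demo* names are illustrative, not from this codebase), using the same serde plus bincode pairing that the Cargo.toml change below adds:

use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;

// Hypothetical stand-ins for OffsetInfo / ModuleStateMap, just to show the
// derive + bincode round trip the added derives make possible.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct DemoOffsetInfo {
    end_offset: usize,
    diff_id: usize,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct DemoStateMap {
    // BTreeMap keys serialize in a deterministic order, which is convenient for caching.
    offsets: BTreeMap<usize, DemoOffsetInfo>,
}

fn main() {
    let mut offsets = BTreeMap::new();
    offsets.insert(0usize, DemoOffsetInfo { end_offset: 16, diff_id: 0 });
    let map = DemoStateMap { offsets };

    // bincode produces a compact binary encoding suitable for an on-disk cache.
    let bytes = bincode::serialize(&map).expect("serialize");
    let restored: DemoStateMap = bincode::deserialize(&bytes).expect("deserialize");
    assert_eq!(map, restored);
}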


@@ -19,3 +19,6 @@ byteorder = "1.3"
nix = "0.15"
libc = "0.2.60"
smallvec = "0.6"
serde = "1.0"
serde_derive = "1.0"
bincode = "1.2"


@@ -22,8 +22,10 @@ use std::{
};
use wasmer_runtime_core::{
backend::{
get_inline_breakpoint_size, sys::Memory, Architecture, Backend, CacheGen, CompilerConfig,
MemoryBoundCheckMode, RunnableModule, Token,
get_inline_breakpoint_size,
sys::{Memory, Protect},
Architecture, Backend, CacheGen, CompilerConfig, MemoryBoundCheckMode, RunnableModule,
Token,
},
cache::{Artifact, Error as CacheError},
codegen::*,
@@ -229,8 +231,6 @@ unsafe impl Sync for FuncPtr {}
pub struct X64ExecutionContext {
#[allow(dead_code)]
code: CodeMemory,
#[allow(dead_code)]
functions: Vec<X64FunctionCode>,
function_pointers: Vec<FuncPtr>,
function_offsets: Vec<AssemblyOffset>,
signatures: Arc<Map<SigIndex, FuncSig>>,
@@ -239,6 +239,25 @@ pub struct X64ExecutionContext {
msm: ModuleStateMap,
}
/// On-disk cache format.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CacheImage {
/// Code for the whole module.
code: Vec<u8>,
/// Offsets to the beginning of each function (including its trampoline, if any).
function_pointers: Vec<usize>,
/// Offsets to the beginning of each function, after its trampoline.
function_offsets: Vec<usize>,
/// Number of imported functions.
func_import_count: usize,
/// Module state map.
msm: ModuleStateMap,
}
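CacheImage stores function_pointers and function_offsets as plain usize offsets into code rather than absolute addresses, because pointers into a mapped code region are only meaningful inside the process that produced them. A minimal sketch of the save-side translation (the helper below is illustrative, not part of the commit):

// Turn absolute entry points into offsets relative to the code base so they
// can be persisted and later rebased onto a freshly mapped code region.
fn relative_offsets(code_base: *const u8, entry_points: &[*const u8]) -> Vec<usize> {
    entry_points
        .iter()
        .map(|&p| {
            (p as usize)
                .checked_sub(code_base as usize)
                .expect("entry point must lie inside the code region")
        })
        .collect()
}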
#[derive(Debug)]
pub struct ControlFrame {
pub label: DynamicLabel,
@@ -257,6 +276,25 @@ pub enum IfElseState {
Else,
}
pub struct SinglepassCache {
buffer: Arc<[u8]>,
}
impl CacheGen for SinglepassCache {
fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
let mut memory = Memory::with_size_protect(self.buffer.len(), Protect::ReadWrite)
.map_err(CacheError::SerializeError)?;
let buffer = &*self.buffer;
unsafe {
memory.as_slice_mut()[..buffer.len()].copy_from_slice(buffer);
}
Ok(([].as_ref().into(), memory))
}
}
impl RunnableModule for X64ExecutionContext {
fn get_func(
&self,
@@ -677,29 +715,41 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
.map(|x| (x.offset, x.fsm.clone()))
.collect();
struct Placeholder;
impl CacheGen for Placeholder {
fn generate_cache(&self) -> Result<(Box<[u8]>, Memory), CacheError> {
Err(CacheError::Unknown(
"the singlepass backend doesn't support caching yet".to_string(),
))
}
}
let msm = ModuleStateMap {
local_functions: local_function_maps,
total_size,
};
let cache_image = CacheImage {
code: output.to_vec(),
function_pointers: out_labels
.iter()
.map(|x| {
(x.0 as usize)
.checked_sub(output.as_ptr() as usize)
.unwrap()
})
.collect(),
function_offsets: out_offsets.iter().map(|x| x.0 as usize).collect(),
func_import_count: self.func_import_count,
msm: msm.clone(),
};
let cache = SinglepassCache {
buffer: Arc::from(bincode::serialize(&cache_image).unwrap().into_boxed_slice()),
};
Ok((
X64ExecutionContext {
code: output,
functions: self.functions,
signatures: self.signatures.as_ref().unwrap().clone(),
breakpoints: breakpoints,
func_import_count: self.func_import_count,
function_pointers: out_labels,
function_offsets: out_offsets,
msm: ModuleStateMap {
local_functions: local_function_maps,
total_size,
},
msm: msm,
},
Box::new(Placeholder),
Box::new(cache),
))
}
@@ -771,10 +821,45 @@ impl ModuleCodeGenerator<X64FunctionCode, X64ExecutionContext, CodegenError>
}));
Ok(())
}
unsafe fn from_cache(_artifact: Artifact, _: Token) -> Result<ModuleInner, CacheError> {
Err(CacheError::Unknown(
"the singlepass compiler API doesn't support caching yet".to_string(),
))
unsafe fn from_cache(artifact: Artifact, _: Token) -> Result<ModuleInner, CacheError> {
let (info, _, memory) = artifact.consume();
let cache_image: CacheImage = bincode::deserialize(memory.as_slice())
.map_err(|x| CacheError::DeserializeError(format!("{:?}", x)))?;
let mut code_mem = CodeMemory::new(cache_image.code.len());
code_mem[0..cache_image.code.len()].copy_from_slice(&cache_image.code);
code_mem.make_executable();
let function_pointers: Vec<FuncPtr> = cache_image
.function_pointers
.iter()
.map(|&x| FuncPtr(code_mem.as_ptr().offset(x as isize) as *const FuncPtrInner))
.collect();
let function_offsets: Vec<AssemblyOffset> = cache_image
.function_offsets
.iter()
.cloned()
.map(AssemblyOffset)
.collect();
let ec = X64ExecutionContext {
code: code_mem,
function_pointers,
function_offsets,
signatures: Arc::new(info.signatures.clone()),
breakpoints: Arc::new(HashMap::new()),
func_import_count: cache_image.func_import_count,
msm: cache_image.msm,
};
Ok(ModuleInner {
runnable_module: Box::new(ec),
cache_gen: Box::new(SinglepassCache {
buffer: Arc::from(memory.as_slice().to_vec().into_boxed_slice()),
}),
info,
})
}
}
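from_cache is essentially generate_cache in reverse: deserialize the CacheImage, copy the code bytes into fresh executable memory, and rebase the stored offsets into callable function pointers. A standalone sketch of the rebasing step, using a plain byte buffer instead of CodeMemory and skipping make_executable (names are illustrative):

// Rebuild absolute entry points from a restored code buffer plus the
// offsets that were stored in the cache image.
fn rebase_entry_points(code: &[u8], offsets: &[usize]) -> Vec<*const u8> {
    offsets
        .iter()
        .map(|&off| {
            assert!(off < code.len(), "offset out of range for restored code");
            // Pointer arithmetic stays within the buffer's bounds.
            unsafe { code.as_ptr().add(off) }
        })
        .collect()
}

fn main() {
    // Pretend these came out of bincode::deserialize over a cache image.
    let restored_code = vec![0u8; 64];
    let restored_offsets = vec![0usize, 16, 32];
    let pointers = rebase_entry_points(&restored_code, &restored_offsets);
    assert_eq!(pointers.len(), 3);
}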


@@ -20,6 +20,11 @@ compile_error!("This crate doesn't yet support compiling on operating systems ot
extern crate dynasmrt;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate dynasm;
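These 2015-edition declarations let the rest of the crate write #[derive(Serialize, Deserialize)] without per-module imports: serde supplies the traits, while #[macro_use] extern crate serde_derive exports the derive macros crate-wide. A tiny sketch of the pattern (module and type names are illustrative):

extern crate serde;
#[macro_use]
extern crate serde_derive;

mod cache_types {
    // No per-module import needed: #[macro_use] at the crate root makes the
    // derive macros visible here under the 2015 edition.
    #[derive(Serialize, Deserialize, Debug)]
    pub struct DemoImage {
        pub code: Vec<u8>,
    }
}

fn main() {
    let img = cache_types::DemoImage { code: vec![0x90] };
    println!("{:?}", img);
}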