new runtime module

This commit is contained in:
Lachlan Sneff 2018-12-24 17:25:17 -05:00
parent 5b920b7953
commit 93ef1e4220
13 changed files with 646 additions and 230 deletions

View File

@ -1,7 +1,7 @@
use std::ops::{Index, IndexMut}; use std::ops::{Index, IndexMut};
use std::ptr::NonNull; use std::ptr::NonNull;
use std::marker::PhantomData; use std::marker::PhantomData;
use cranelift_entity::EntityRef; use crate::runtime::types::MapIndex;
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
#[repr(transparent)] #[repr(transparent)]
@ -109,19 +109,32 @@ impl<'a, T> From<&'a [T]> for BoundedSlice<T> {
#[derive(Debug)] #[derive(Debug)]
#[repr(transparent)] #[repr(transparent)]
pub struct IndexedSlice<T, I> { pub struct IndexedSlice<'a, T, I> {
ptr: NonNull<T>, ptr: NonNull<T>,
_phantom: PhantomData<I>, _phantom: PhantomData<I>,
} }
impl<T, I> IndexedSlice<T, I> { impl<'a, T: 'a, I> IndexedSlice<T, I>
pub fn new(ptr: *mut T) -> Self { where
I: MapIndex,
{
pub(crate) fn new(ptr: *mut T) -> Self {
Self { Self {
ptr: NonNull::new(ptr).unwrap(), ptr: NonNull::new(ptr).unwrap(),
_phantom: PhantomData, _phantom: PhantomData,
} }
} }
pub unsafe fn get(&self, index: I) -> &T {
let ptr = self.as_ptr();
&*ptr.add(index.index())
}
pub unsafe fn get_mut(&mut self, index: I) -> &mut T {
let ptr = self.as_mut_ptr();
&mut *ptr.add(index.index())
}
pub fn as_ptr(&self) -> *const T { pub fn as_ptr(&self) -> *const T {
self.ptr.as_ptr() self.ptr.as_ptr()
} }
@ -129,19 +142,4 @@ impl<T, I> IndexedSlice<T, I> {
pub fn as_mut_ptr(&mut self) -> *mut T { pub fn as_mut_ptr(&mut self) -> *mut T {
self.ptr.as_ptr() self.ptr.as_ptr()
} }
}
impl<T, I: EntityRef> Index<I> for IndexedSlice<T, I> {
type Output = T;
fn index(&self, index: I) -> &T {
let ptr = self.as_ptr();
unsafe { &*ptr.add(index.index()) }
}
}
impl<T, I: EntityRef> IndexMut<I> for IndexedSlice<T, I> {
fn index_mut(&mut self, index: I) -> &mut T {
let ptr = self.as_mut_ptr();
unsafe { &mut *ptr.add(index.index()) }
}
} }

View File

@ -34,3 +34,4 @@ pub mod sighandler;
mod spectests; mod spectests;
pub mod update; pub mod update;
pub mod webassembly; pub mod webassembly;
pub mod runtime;

15
src/runtime/backend.rs Normal file
View File

@ -0,0 +1,15 @@
use crate::runtime::module::Module;
use crate::runtime::types::FuncIndex;
use crate::runtime::{
vm,
module::Module,
types::FuncIndex,
};
/// A backend capable of translating raw wasm bytes into a runnable `Module`.
pub trait Compiler {
    /// Compiles the given wasm binary into a `Module`.
    ///
    /// Takes no `&self`: implementations are stateless entry points
    /// invoked as `C::compile(bytes)`.
    fn compile(wasm: &[u8]) -> Box<Module>;
}
/// Resolves module-local function indices to callable machine-code entry points.
pub trait FuncResolver {
    /// Returns a pointer to the compiled code for the function at `index`.
    // Bug fix: trait methods cannot carry a `pub` qualifier — the original
    // `pub fn resolve(...)` was a syntax error.
    fn resolve(&self, index: FuncIndex) -> *const vm::Func;
}

View File

@ -1,13 +1,20 @@
use super::vm; use super::vm;
use super::module::Module; use crate::module::Module;
use super::table::{TableBacking, TableScheme}; use super::table::{TableBacking, TableScheme};
use super::memory::LinearMemory; use super::memory::LinearMemory;
use super::instance::{InstanceOptions, InstanceABI}; use super::instance::{InstanceOptions, InstanceABI};
use super::ImportObject; use std::mem;
use cranelift_entity::EntityRef;
use crate::runtime::{
vm,
module::Module,
table::TableBacking,
types::{Val, GlobalInit},
};
#[derive(Debug)] #[derive(Debug)]
pub struct Backing { pub struct LocalBacking {
memories: Box<[LinearMemory]>, memories: Box<[LinearMemory]>,
tables: Box<[TableBacking]>, tables: Box<[TableBacking]>,
@ -16,47 +23,48 @@ pub struct Backing {
vm_globals: Box<[vm::LocalGlobal]>, vm_globals: Box<[vm::LocalGlobal]>,
} }
impl Backing { impl LocalBacking {
pub fn new(module: &Module, options: &InstanceOptions, imports: &ImportObject) -> Self { pub fn new(module: &Module, imports: &ImportBacking, options: &InstanceOptions) -> Self {
let memories = Backing::generate_memories(module, options); let mut memories = Self::generate_memories(module, options);
let tables = Backing::generate_tables(module, options); let mut tables = Self::generate_tables(module, options);
let globals = Self::generate_globals(module);
Backing { Self {
memories, memories,
tables, tables,
vm_memories: Backing::finalize_memories(module, &memories, options), vm_memories: Self::finalize_memories(module, &mut memories[..], options),
vm_tables: Backing::finalize_tables(module, &tables, options, imports), vm_tables: Self::finalize_tables(module, &mut tables[..], options),
vm_globals: Backing::generate_globals(module), vm_globals: Self::finalize_globals(module, imports, globals),
} }
} }
fn generate_memories(module: &Module, options: &InstanceOptions) -> Box<[LinearMemory]> { fn generate_memories(module: &Module, options: &InstanceOptions) -> Box<[LinearMemory]> {
let memories = Vec::with_capacity(module.info.memories.len()); let mut memories = Vec::with_capacity(module.memories.len());
for mem in &module.info.memories { for (_, mem) in &module.memories {
let memory = mem.entity;
// If we use emscripten, we set a fixed initial and maximum // If we use emscripten, we set a fixed initial and maximum
debug!( debug!(
"Instance - init memory ({}, {:?})", "Instance - init memory ({}, {:?})",
memory.minimum, memory.maximum memory.min, memory.max
); );
let memory = if options.abi == InstanceABI::Emscripten { // let memory = if options.abi == InstanceABI::Emscripten {
// We use MAX_PAGES, so at the end the result is: // // We use MAX_PAGES, so at the end the result is:
// (initial * LinearMemory::PAGE_SIZE) == LinearMemory::DEFAULT_HEAP_SIZE // // (initial * LinearMemory::PAGE_SIZE) == LinearMemory::DEFAULT_HEAP_SIZE
// However, it should be: (initial * LinearMemory::PAGE_SIZE) == 16777216 // // However, it should be: (initial * LinearMemory::PAGE_SIZE) == 16777216
LinearMemory::new(LinearMemory::MAX_PAGES, None) // LinearMemory::new(LinearMemory::MAX_PAGES, None)
} else { // } else {
LinearMemory::new(memory.minimum, memory.maximum.map(|m| m as u32)) // LinearMemory::new(memory.minimum, memory.maximum.map(|m| m as u32))
}; // };
let memory = LinearMemory::new(mem);
memories.push(memory); memories.push(memory);
} }
memories.into_boxed_slice() memories.into_boxed_slice()
} }
fn finalize_memories(module: &Module, memories: &[LinearMemory], options: &InstanceOptions) -> Box<[vm::LocalMemory]> { fn finalize_memories(module: &Module, memories: &mut [LinearMemory], options: &InstanceOptions) -> Box<[vm::LocalMemory]> {
for init in &module.info.data_initializers { for init in &module.data_initializers {
debug_assert!(init.base.is_none(), "globalvar base not supported yet"); debug_assert!(init.base.is_none(), "globalvar base not supported yet");
let offset = init.offset; let offset = init.offset;
let mem: &mut LinearMemory = &mut memories[init.memory_index.index()]; let mem: &mut LinearMemory = &mut memories[init.memory_index.index()];
@ -76,35 +84,44 @@ impl Backing {
debug!("emscripten::finish setup memory"); debug!("emscripten::finish setup memory");
} }
memories.iter().map(|mem| mem.into_vm_memory()).collect::<Vec<_>>().into_boxed_slice() memories.iter_mut().map(|mem| mem.into_vm_memory()).collect::<Vec<_>>().into_boxed_slice()
} }
fn generate_tables(module: &Module, options: &InstanceOptions) -> Box<[TableBacking]> { fn generate_tables(module: &Module, options: &InstanceOptions) -> Box<[TableBacking]> {
let mut tables = Vec::with_capacity(module.info.tables.len()); let mut tables = Vec::with_capacity(module.tables.len());
for table in &module.info.tables { for table in &module.tables {
let scheme = TableScheme::from_table(table.entity); let table_backing = TableBacking::new(table);
let table_backing = TableBacking::new(&scheme); tables.push(table_backing);
tables.push(table_backing);
} }
tables.into_boxed_slice() tables.into_boxed_slice()
} }
fn finalize_tables(module: &Module, tables: &[TableBacking], options: &InstanceOptions, imports: &ImportObject) -> Box<[vm::LocalTable]> { fn finalize_tables(module: &Module, tables: &[TableBacking], options: &InstanceOptions) -> Box<[vm::LocalTable]> {
tables.iter().map(|table| table.into_vm_table()).collect::<Vec<_>>().into_boxed_slice() tables.iter().map(|table| table.into_vm_table()).collect::<Vec<_>>().into_boxed_slice()
} }
fn generate_globals(module: &Module) -> Box<[vm::LocalGlobal]> { fn generate_globals(module: &Module) -> Box<[vm::LocalGlobal]> {
let mut globals = Vec::with_capacity(module.info.globals.len()); let mut globals = vec![vm::LocalGlobal::null(); module.globals.len()];
for global in module.info.globals.iter().map(|mem| mem.entity) {
}
globals.into_boxed_slice() globals.into_boxed_slice()
} }
fn finalize_globals(module: &Module, imports: &ImportBacking, globals: Box<[vm::LocalGlobal]>) -> Box<[vm::LocalGlobal]> {
for (to, from) in globals.iter_mut().zip(module.globals.iter()) {
*to = match from.init {
GlobalInit::Val(Val::I32(x)) => x as u64,
GlobalInit::Val(Val::I64(x)) => x as u64,
GlobalInit::Val(Val::F32(x)) => x as u64,
GlobalInit::Val(Val::F64(x)) => x,
GlobalInit::GetGlobal(index) => unsafe { (*imports.globals[index.index()].global).data },
};
}
globals
}
// fn generate_tables(module: &Module, _options: &InstanceOptions) -> (Box<[TableBacking]>, Box<[vm::LocalTable]>) { // fn generate_tables(module: &Module, _options: &InstanceOptions) -> (Box<[TableBacking]>, Box<[vm::LocalTable]>) {
// let mut tables = Vec::new(); // let mut tables = Vec::new();
// // Reserve space for tables // // Reserve space for tables
@ -174,7 +191,7 @@ impl Backing {
} }
#[derive(Debug)] #[derive(Debug)]
pub struct ImportsBacking { pub struct ImportBacking {
functions: Box<[vm::ImportedFunc]>, functions: Box<[vm::ImportedFunc]>,
memories: Box<[vm::ImportedMemory]>, memories: Box<[vm::ImportedMemory]>,
tables: Box<[vm::ImportedTable]>, tables: Box<[vm::ImportedTable]>,

26
src/runtime/instance.rs Normal file
View File

@ -0,0 +1,26 @@
use crate::runtime::{
vm,
backing::{LocalBacking, ImportBacking},
};
use std::sync::Arc;
/// A fully-instantiated WebAssembly module.
pub struct Instance {
    /// The context handed to compiled code; holds raw pointers into this
    /// instance's memories, tables, and globals (see `vm::Ctx`).
    pub vmctx: vm::Ctx,
    /// Pointers to the compiled machine code of the module's functions.
    pub finalized_funcs: Box<[*const vm::Func]>,
    /// Storage for this instance's own memories, tables, and globals.
    pub backing: LocalBacking,
    /// Resolved imports (functions, memories, tables, ...).
    pub imports: ImportBacking,
    // NOTE(review): `Module` is not in this file's `use` list — confirm the
    // import before this can compile.
    pub module: Arc<Module>,
}
impl Instance {
    /// Instantiates the given module.
    ///
    /// NOTE(review): this is clearly work-in-progress — `vmctx` and
    /// `finalized_funcs` are not defined anywhere in this function, and the
    /// `backing`, `imports`, and `module` fields declared on `Instance` are
    /// never populated, so this cannot compile as written. TODO: build the
    /// backings, resolve function pointers, and construct the `vm::Ctx` here.
    pub fn new(module: Arc<Module>) -> Box<Instance> {
        Box::new(Instance {
            vmctx,
            finalized_funcs
        })
    }
}

View File

@ -8,15 +8,18 @@ use std::ops::{Deref, DerefMut};
use std::slice; use std::slice;
use crate::common::mmap::Mmap; use crate::common::mmap::Mmap;
use super::vm::LocalMemory; use crate::runtime::{
vm::LocalMemory,
types::{Memory, Map, FuncIndex},
};
/// A linear memory instance. /// A linear memory instance.
#[derive(Debug)] #[derive(Debug)]
pub struct LinearMemory { pub struct LinearMemory {
// The mmap allocation /// The actual memory allocation.
mmap: Mmap, mmap: Mmap,
// current number of wasm pages /// The current number of wasm pages.
current: u32, current: u32,
// The maximum size the WebAssembly Memory is allowed to grow // The maximum size the WebAssembly Memory is allowed to grow
@ -25,17 +28,20 @@ pub struct LinearMemory {
// front. However, the engine may ignore or clamp this reservation // front. However, the engine may ignore or clamp this reservation
// request. In general, most WebAssembly modules shouldn't need // request. In general, most WebAssembly modules shouldn't need
// to set a maximum. // to set a maximum.
maximum: Option<u32>, max: Option<u32>,
// The size of the extra guard pages after the end. // The size of the extra guard pages after the end.
// Is used to optimize loads and stores with constant offsets. // Is used to optimize loads and stores with constant offsets.
offset_guard_size: usize, offset_guard_size: usize,
/// Requires exception catching to handle out-of-bounds accesses.
requires_signal_catch: bool,
} }
/// It holds the raw bytes of memory accessed by a WebAssembly Instance /// It holds the raw bytes of memory accessed by a WebAssembly Instance
impl LinearMemory { impl LinearMemory {
pub const PAGE_SIZE: u32 = 65536; pub const PAGE_SIZE: u32 = 65_536;
pub const MAX_PAGES: u32 = 65536; pub const MAX_PAGES: u32 = 65_536;
pub const DEFAULT_HEAP_SIZE: usize = 1 << 32; // 4 GiB pub const DEFAULT_HEAP_SIZE: usize = 1 << 32; // 4 GiB
pub const DEFAULT_GUARD_SIZE: usize = 1 << 31; // 2 GiB pub const DEFAULT_GUARD_SIZE: usize = 1 << 31; // 2 GiB
pub const DEFAULT_SIZE: usize = Self::DEFAULT_HEAP_SIZE + Self::DEFAULT_GUARD_SIZE; // 6 GiB pub const DEFAULT_SIZE: usize = Self::DEFAULT_HEAP_SIZE + Self::DEFAULT_GUARD_SIZE; // 6 GiB
@ -43,46 +49,41 @@ impl LinearMemory {
/// Create a new linear memory instance with specified initial and maximum number of pages. /// Create a new linear memory instance with specified initial and maximum number of pages.
/// ///
/// `maximum` cannot be set to more than `65536` pages. /// `maximum` cannot be set to more than `65536` pages.
pub fn new(initial: u32, maximum: Option<u32>) -> Self { pub fn new(mem: &Memory) -> Self {
assert!(initial <= Self::MAX_PAGES); assert!(mem.min <= Self::MAX_PAGES);
assert!(maximum.is_none() || maximum.unwrap() <= Self::MAX_PAGES); assert!(mem.max.is_none() || mem.max.unwrap() <= Self::MAX_PAGES);
debug!( debug!("Instantiate LinearMemory(mem: {:?})", mem);
"Instantiate LinearMemory(initial={:?}, maximum={:?})",
initial, maximum
);
let mut mmap = Mmap::with_size(Self::DEFAULT_SIZE).expect("Can't create mmap"); let (mmap_size, initial_pages, offset_guard_size, requires_signal_catch) = if mem.is_static_heap() {
(Self::DEFAULT_SIZE, mem.min, Self::DEFAULT_GUARD_SIZE, true)
// This is a static heap
} else {
// this is a dynamic heap
assert!(!mem.shared, "shared memories must have a maximum size.");
let base = mmap.as_mut_ptr(); (mem.min as usize * Self::PAGE_SIZE as usize, mem.min, 0, false)
}
let mut mmap = Mmap::with_size(mmap_size).unwrap();
// map initial pages as readwrite since the inital mmap is mapped as not accessible. // map initial pages as readwrite since the inital mmap is mapped as not accessible.
if initial != 0 { if initial_pages != 0 {
unsafe { unsafe {
region::protect( region::protect(
base, mmap.as_mut_ptr(),
initial as usize * Self::PAGE_SIZE as usize, initial_pages as usize * Self::PAGE_SIZE as usize,
region::Protection::ReadWrite, region::Protection::ReadWrite,
) )
} }
.expect("unable to make memory inaccessible"); .expect("unable to make memory accessible");
} }
debug!("LinearMemory instantiated");
debug!(
" - usable: {:#x}..{:#x}",
base as usize,
(base as usize) + LinearMemory::DEFAULT_HEAP_SIZE
);
debug!(
" - guard: {:#x}..{:#x}",
(base as usize) + LinearMemory::DEFAULT_HEAP_SIZE,
(base as usize) + LinearMemory::DEFAULT_SIZE
);
Self { Self {
mmap, mmap,
current: initial, current: initial_pages,
offset_guard_size: LinearMemory::DEFAULT_GUARD_SIZE, max: mem.max,
maximum, offset_guard_size,
requires_signal_catch,
} }
} }
@ -116,8 +117,60 @@ impl LinearMemory {
/// ///
/// Returns `None` if memory can't be grown by the specified amount /// Returns `None` if memory can't be grown by the specified amount
/// of pages. /// of pages.
pub fn grow(&mut self, add_pages: u32) -> Option<i32> { pub fn grow_dynamic(&mut self, add_pages: u32) -> Option<i32> {
debug!("grow_memory called!"); debug!("grow_memory_dynamic called!");
assert!(self.max.is_none());
if add_pages == 0 {
return Some(self.current as _);
}
let prev_pages = self.current;
let new_pages = match self.current.checked_add(add_pages) {
Some(new_pages) => new_pages,
None => return None,
};
if let Some(val) = self.maximum {
if new_pages > val {
return None;
}
// Wasm linear memories are never allowed to grow beyond what is
// indexable. If the memory has no maximum, enforce the greatest
// limit here.
} else if new_pages >= Self::MAX_PAGES {
return None;
}
let prev_bytes = (prev_pages * Self::PAGE_SIZE) as usize;
let new_bytes = (new_pages * Self::PAGE_SIZE) as usize;
if new_bytes > self.mmap.len() - self.offset_guard_size {
let mmap_size = new_bytes.checked_add(self.offset_guard_size)?;
let mut new_mmap = Mmap::with_size(request_bytes).ok()?;
unsafe {
region::protect(
new_mmap.as_mut_ptr(),
new_bytes,
region::Protection::ReadWrite,
).ok()?;
}
let copy_size = self.mmap.len() - self.offset_guard_size;
new_mmap.as_mut_slice()[..copy_size].copy_from_slice(self.mmap.as_slice()[..copy_size]);
self.mmap = new_mmap;
}
self.current = new_pages;
Some(prev_pages as i32)
}
pub fn grow_static(&mut self, add_pages: u32) -> Option<i32> {
debug!("grow_memory_static called!");
assert!(self.max.is_some());
if add_pages == 0 { if add_pages == 0 {
return Some(self.current as _); return Some(self.current as _);
} }
@ -143,40 +196,13 @@ impl LinearMemory {
let prev_bytes = (prev_pages * Self::PAGE_SIZE) as usize; let prev_bytes = (prev_pages * Self::PAGE_SIZE) as usize;
let new_bytes = (new_pages * Self::PAGE_SIZE) as usize; let new_bytes = (new_pages * Self::PAGE_SIZE) as usize;
// if new_bytes > self.mmap.len() - self.offset_guard_size {
unsafe { unsafe {
region::protect( region::protect(
self.mmap.as_ptr().add(prev_bytes) as _, self.mmap.as_ptr().add(prev_bytes) as _,
new_bytes - prev_bytes, new_bytes - prev_bytes,
region::Protection::ReadWrite, region::Protection::ReadWrite,
) ).ok()?;
} }
.expect("unable to make memory inaccessible");
// };
// if new_bytes > self.mmap.len() - self.offset_guard_size {
// // If we have no maximum, this is a "dynamic" heap, and it's allowed to move.
// assert!(self.maximum.is_none());
// let guard_bytes = self.offset_guard_size;
// let request_bytes = new_bytes.checked_add(guard_bytes)?;
// let mut new_mmap = Mmap::with_size(request_bytes).ok()?;
// // Make the offset-guard pages inaccessible.
// unsafe {
// region::protect(
// new_mmap.as_ptr().add(new_bytes),
// guard_bytes,
// region::Protection::Read | region::Protection::Write,
// // region::Protection::None,
// )
// }
// .expect("unable to make memory inaccessible");
// let copy_len = self.mmap.len() - self.offset_guard_size;
// new_mmap.as_mut_slice()[..copy_len].copy_from_slice(&self.mmap.as_slice()[..copy_len]);
// self.mmap = new_mmap;
// }
self.current = new_pages; self.current = new_pages;
@ -213,3 +239,8 @@ impl DerefMut for LinearMemory {
} }
} }
} }
/// Rounds `size` up to the next multiple of the host page size.
fn round_up_to_page_size(size: usize) -> usize {
    // Page sizes are powers of two, so rounding up reduces to masking
    // off the low bits after adding `page_size - 1`.
    let mask = region::page::size() - 1;
    (size + mask) & !mask
}

9
src/runtime/mod.rs Normal file
View File

@ -0,0 +1,9 @@
pub mod vm; // `#[repr(C)]` context structures shared with compiled code (`Ctx`, `LocalMemory`, ...)
pub mod backing; // per-instance storage for memories/tables/globals (`LocalBacking`, `ImportBacking`)
pub mod types; // core descriptor types (`Memory`, `Table`, `Global`, `Map`, typed indices)
pub mod memory; // `LinearMemory`: mmap-backed wasm linear memory
pub mod backend; // `Compiler` / `FuncResolver` traits implemented by code-generation backends
pub mod module; // `Module`: the output of compilation, input to instantiation
pub mod instance; // `Instance`: an instantiated module
pub mod table; // `TableBacking`: storage for wasm tables

52
src/runtime/module.rs Normal file
View File

@ -0,0 +1,52 @@
use crate::runtime::types::{
Map,
FuncIndex, MemoryIndex, TableIndex, GlobalIndex,
Memory, Globals, GlobalDesc, Func, Table,
};
use crate::runtime::backend::FuncResolver;
/// This is used to instantiate a new webassembly module.
#[derive(Debug)]
pub struct Module {
    /// Resolves local function indices to compiled code (backend-provided).
    pub functions: Box<dyn FuncResolver>,
    /// Descriptors of the module's own (non-imported) memories.
    pub memories: Map<Memory, MemoryIndex>,
    /// Descriptors and initializers of the module's own globals.
    // NOTE(review): `Global` is not in the `use` list above (it imports
    // `Globals`) — confirm the import.
    pub globals: Map<Global, GlobalIndex>,
    /// Descriptors of the module's own tables.
    pub tables: Map<Table, TableIndex>,

    // Imports, keyed by their `(module, item)` names. `ImportName`,
    // `ItemName`, `Export`, and `DataInitializer` are defined later in this
    // file rather than imported.
    pub imported_functions: Map<(ImportName, Func), FuncIndex>,
    pub imported_memories: Map<(ImportName, Memory), MemoryIndex>,
    pub imported_tables: Map<(ImportName, Table), TableIndex>,
    pub imported_globals: Map<(ImportName, GlobalDesc), GlobalIndex>,

    /// Exported items by name.
    pub exported: Vec<(ItemName, Export)>,
    /// Data segments to copy into linear memory at instantiation.
    pub data_initializers: Vec<DataInitializer>,
    /// The start function.
    // NOTE(review): not every wasm module has a start function — this should
    // likely be `Option<FuncIndex>`; confirm.
    pub start_func: FuncIndex,
    /// Function signatures, indexed by function.
    pub signatures: Map<Func, FuncIndex>,
}
/// A module name, as raw bytes.
pub type ModuleName = Vec<u8>;
/// An item (function/memory/global/table) name, as raw bytes.
pub type ItemName = Vec<u8>;
/// A fully-qualified import name: `(module, item)`.
pub type ImportName = (ModuleName, ItemName);

/// An exported item: its kind plus its index into the owning `Module`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Export {
    Func(FuncIndex),
    Memory(MemoryIndex),
    Global(GlobalIndex),
    Table(TableIndex),
}
/// A data initializer for linear memory.
#[derive(Debug)]
pub struct DataInitializer {
    /// The index of the memory to initialize.
    pub memory_index: MemoryIndex,
    /// Optionally a globalvalue base to initialize at.
    // NOTE(review): global-base initializers are not yet supported —
    // `LocalBacking::finalize_memories` debug_asserts `base.is_none()`.
    pub base: Option<GlobalIndex>,
    /// A constant offset to initialize at.
    pub offset: usize,
    /// The initialization data.
    pub data: Vec<u8>,
}

38
src/runtime/table.rs Normal file
View File

@ -0,0 +1,38 @@
use super::vm;
use crate::runtime::types::{ElementType, Table};
/// Storage for a table's elements, grouped by element type.
// Bug fix: the original derived `Copy`, `PartialEq`, and `Eq`, but `Box<[_]>`
// is never `Copy`, and the equality derives would require `vm::Anyfunc` to
// implement them. `vm::Anyfunc` derives only `Debug` and `Clone`, so those
// are what we can offer here. Made `pub` because `TableBacking::elements`
// (a public field) exposes this type.
#[derive(Debug, Clone)]
pub enum TableElements {
    /// This is intended to be a caller-checked Anyfunc.
    Anyfunc(Box<[vm::Anyfunc]>),
}
/// Runtime storage backing a single wasm table.
#[derive(Debug)]
pub struct TableBacking {
    /// The table's element storage.
    pub elements: TableElements,
    /// Maximum number of elements, if bounded.
    pub max: Option<u32>,
}
impl TableBacking {
    /// Allocates storage for `table`, filled with null anyfuncs.
    pub fn new(table: &Table) -> Self {
        match table.ty {
            ElementType::Anyfunc => Self {
                // Bug fix: `table.min` is a `u32`; `vec![elem; n]` needs a
                // `usize` length.
                elements: TableElements::Anyfunc(
                    vec![vm::Anyfunc::null(); table.min as usize].into_boxed_slice(),
                ),
                max: table.max,
            },
        }
    }

    /// Produces the `#[repr(C)]` view of this table handed to compiled code.
    ///
    /// The returned `vm::LocalTable` borrows `self`'s element storage via a
    /// raw pointer, so `self` must outlive any use of the result.
    pub fn into_vm_table(&mut self) -> vm::LocalTable {
        // Bug fix: the original matched by value, attempting to move `funcs`
        // out of `&mut self`; bind by mutable reference instead.
        match self.elements {
            TableElements::Anyfunc(ref mut funcs) => vm::LocalTable {
                base: funcs.as_mut_ptr() as *mut u8,
                current_elements: funcs.len(),
            },
        }
    }
}

270
src/runtime/types.rs Normal file
View File

@ -0,0 +1,270 @@
use std::marker::PhantomData;
use std::ops::{Index, IndexMut};
use std::{iter, slice};
/// The type of a WebAssembly value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Type {
    /// The `i32` type.
    I32,
    /// The `i64` type.
    I64,
    /// The `f32` type.
    F32,
    /// The `f64` type.
    F64,
}
/// A WebAssembly runtime value.
///
/// Floating-point values are stored as their raw IEEE-754 bit patterns
/// (`u32`/`u64`) rather than `f32`/`f64`, which allows `Eq` to be derived.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Val {
    /// The `i32` type.
    I32(i32),
    /// The `i64` type.
    I64(i64),
    /// The `f32` type, stored as raw bits.
    F32(u32),
    /// The `f64` type, stored as raw bits.
    F64(u64),
}

impl From<i32> for Val {
    fn from(n: i32) -> Self {
        Self::I32(n)
    }
}

impl From<i64> for Val {
    fn from(n: i64) -> Self {
        Self::I64(n)
    }
}

impl From<f32> for Val {
    fn from(n: f32) -> Self {
        Self::F32(n.to_bits())
    }
}

impl From<f64> for Val {
    fn from(n: f64) -> Self {
        // Bug fix: the original produced `Self::I64(n.to_bits())`, tagging an
        // `f64` payload as an integer value.
        Self::F64(n.to_bits())
    }
}
/// The kind of element a table holds.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ElementType {
    /// Any wasm function.
    Anyfunc,
}

/// A wasm table descriptor.
#[derive(Debug, Clone, Copy)]
pub struct Table {
    /// Type of data stored in this table.
    pub ty: ElementType,
    /// The minimum number of elements that must be stored in this table.
    pub min: u32,
    /// The maximum number of elements in this table.
    pub max: Option<u32>,
}
/// A global value initializer.
/// Overtime, this will be able to represent more and more
/// complex expressions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum GlobalInit {
    /// Initialize to a constant value.
    Val(Val),
    /// Initialize to the current value of another global.
    // NOTE(review): `LocalBacking::finalize_globals` resolves this index
    // against the *imported* globals only — confirm references to local
    // globals are ruled out elsewhere.
    GetGlobal(GlobalIndex),
}

/// The type and mutability of a global, without its initializer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GlobalDesc {
    pub mutable: bool,
    pub ty: Type,
}

/// A wasm global.
#[derive(Debug, Clone, Copy)]
pub struct Global {
    pub desc: GlobalDesc,
    pub init: GlobalInit,
}
/// A wasm memory.
#[derive(Debug, Clone, Copy)]
pub struct Memory {
    /// The minimum number of allowed pages.
    pub min: u32,
    /// The maximum number of allowed pages.
    pub max: Option<u32>,
    /// This memory can be shared between wasm threads.
    pub shared: bool,
}

impl Memory {
    /// A memory with a declared maximum is allocated as a fixed-size
    /// ("static") heap; one without a maximum grows by remapping
    /// (see `LinearMemory::new`).
    pub fn is_static_heap(&self) -> bool {
        self.max.is_some()
    }
}
/// A wasm func.
// NOTE(review): despite the name, this holds only a *signature*
// (parameter and result types); see its use in `Module::signatures`.
#[derive(Debug)]
pub struct Func {
    pub params: Vec<Type>,
    pub returns: Vec<Type>,
}
/// A typed index into a `Map`.
pub trait MapIndex {
    /// Wraps a raw `usize` into the index type.
    fn new(index: usize) -> Self;
    /// Unwraps the index back into a raw `usize`.
    fn index(&self) -> usize;
}

/// Dense item map, addressed by a typed index instead of a raw `usize`.
#[derive(Debug, Clone)]
pub struct Map<T, I>
where
    I: MapIndex,
{
    elems: Vec<T>,
    _marker: PhantomData<I>,
}

// Bug fix: the original wrote `impl Map`, naming the generic type without
// its parameters — that does not compile; the impl must be generic.
impl<T, I> Map<T, I>
where
    I: MapIndex,
{
    /// Creates an empty map.
    pub fn new() -> Self {
        Self {
            elems: Vec::new(),
            _marker: PhantomData,
        }
    }

    /// Creates an empty map with room for `capacity` items.
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            elems: Vec::with_capacity(capacity),
            _marker: PhantomData,
        }
    }

    /// Checked lookup; returns `None` when `index` is out of bounds.
    pub fn get(&self, index: I) -> Option<&T> {
        self.elems.get(index.index())
    }

    /// The number of items stored.
    pub fn len(&self) -> usize {
        self.elems.len()
    }

    /// Appends `value` and returns the index it was stored at.
    pub fn push(&mut self, value: T) -> I {
        let len = self.len();
        self.elems.push(value);
        I::new(len)
    }
}
/// Panicking indexed access (`map[idx]`), mirroring `Vec` indexing.
// NOTE(review): `Index`/`IndexMut` are not brought into scope by this file's
// imports (only `PhantomData`, `slice`, `iter`) — confirm
// `use std::ops::{Index, IndexMut};` is present at the top of the file.
impl<T, I> Index<I> for Map<T, I>
where
    I: MapIndex,
{
    type Output = T;
    fn index(&self, index: I) -> &T {
        &self.elems[index.index()]
    }
}

/// Panicking mutable indexed access (`map[idx] = v`).
impl<T, I> IndexMut<I> for Map<T, I>
where
    I: MapIndex,
{
    fn index_mut(&mut self, index: I) -> &mut T {
        &mut self.elems[index.index()]
    }
}
impl<'a, T, I> IntoIterator for &'a Map<T, I>
where
I: MapIndex
{
type Item = (I, &'a T);
type IntoIter = Iter<'a, T, I>;
fn into_iter(self) -> Self::IntoIter {
Iter::new(self.elems.iter())
}
}
impl<'a, T, I> IntoIterator for &'a mut Map<T, I>
where
I: MapIndex
{
type Item = (I, &'a mut T);
type IntoIter = IterMut<'a, T, I>;
fn into_iter(self) -> Self::IntoIter {
Iter::new(self.elems.iter_mut())
}
}
pub struct Iter<'a, T: 'a, I: MapIndex> {
enumerated: iter::Enumerate<slice::Iter<'A, T>>,
_marker: PhantomData,
}
impl<'a, T: 'a, I: MapIndex> Iter<'a, T, I> {
fn new(iter: slice::Iter<'a, T>) -> Self {
Self {
enumerated: iter.enumerate(),
_marker: PhantomData,
}
}
}
impl<'a, T: 'a, I: MapIndex> Iterator for Iter<'a, T, I> {
type Item = (I, &'a T);
fn next(&mut self) -> Self::Item {
self.enumerated.next().map(|i, v| (I::new(i), v))
}
}
pub struct IterMut<'a, T: 'a, I: MapIndex> {
enumerated: iter::Enumerate<slice::Iter<'A, T>>,
_marker: PhantomData,
}
impl<'a, T: 'a, I: MapIndex> IterMut<'a, T, I> {
fn new(iter: slice::Iter<'a, T>) -> Self {
Self {
enumerated: iter.enumerate(),
_marker: PhantomData,
}
}
}
impl<'a, T: 'a, I: MapIndex> Iterator for IterMut<'a, T, I> {
type Item = (I, &'a mut T);
fn next(&mut self) -> Self::Item {
self.enumerated.next().map(|i, v| (I::new(i), v))
}
}
macro_rules! define_map_index {
($ty:ident) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct $ty (u32);
impl MapIndex for $ty {
fn new(index: usize) -> Self {
$ty (index as _)
}
fn index(&self) -> usize {
self.0 as usize
}
}
};
($($ty:ident),*) => {
$(
define_map_index!($ty);
)*
};
}
define_map_index![
FuncIndex, MemoryIndex, GlobalIndex, TableIndex,
SignatureIndex,
];

View File

@ -1,40 +1,38 @@
use std::{ptr, mem}; use std::{ptr, mem};
use crate::common::slice::IndexedSlice; use crate::runtime::types::{
use cranelift_wasm::{ MemoryIndex, TableIndex, GlobalIndex, FuncIndex,
TableIndex, MemoryIndex, GlobalIndex, FuncIndex,
DefinedTableIndex, DefinedMemoryIndex, DefinedGlobalIndex,
SignatureIndex, SignatureIndex,
}; };
#[derive(Debug)] #[derive(Debug)]
#[repr(C)] #[repr(C)]
pub struct VmCtx { pub struct Ctx<'a> {
/// A pointer to an array of locally-defined memories, indexed by `DefinedMemoryIndex`. /// A pointer to an array of locally-defined memories, indexed by `DefinedMemoryIndex`.
pub(in crate::webassembly) memories: IndexedSlice<LocalMemory, DefinedMemoryIndex>, pub memories: *mut LocalMemory,
/// A pointer to an array of locally-defined tables, indexed by `DefinedTableIndex`. /// A pointer to an array of locally-defined tables, indexed by `DefinedTableIndex`.
pub(in crate::webassembly) tables: IndexedSlice<LocalTable, DefinedTableIndex>, pub tables: *mut LocalTable,
/// A pointer to an array of locally-defined globals, indexed by `DefinedGlobalIndex`. /// A pointer to an array of locally-defined globals, indexed by `DefinedGlobalIndex`.
pub(in crate::webassembly) globals: IndexedSlice<LocalGlobal, DefinedGlobalIndex>, pub globals: *mut LocalGlobal,
/// A pointer to an array of imported memories, indexed by `MemoryIndex, /// A pointer to an array of imported memories, indexed by `MemoryIndex,
pub(in crate::webassembly) imported_memories: IndexedSlice<ImportedMemory, MemoryIndex>, pub imported_memories: *mut ImportedMemory,
/// A pointer to an array of imported tables, indexed by `TableIndex`. /// A pointer to an array of imported tables, indexed by `TableIndex`.
pub(in crate::webassembly) imported_tables: IndexedSlice<ImportedTable, TableIndex>, pub imported_tables: *mut ImportedTable,
/// A pointer to an array of imported globals, indexed by `GlobalIndex`. /// A pointer to an array of imported globals, indexed by `GlobalIndex`.
pub(in crate::webassembly) imported_globals: IndexedSlice<ImportedGlobal, GlobalIndex>, pub imported_globals: *mut ImportedGlobal,
/// A pointer to an array of imported functions, indexed by `FuncIndex`. /// A pointer to an array of imported functions, indexed by `FuncIndex`.
pub(in crate::webassembly) imported_funcs: IndexedSlice<ImportedFunc, FuncIndex>, pub imported_funcs: *mut ImportedFunc,
/// Signature identifiers for signature-checked indirect calls. /// Signature identifiers for signature-checked indirect calls.
pub(in crate::webassembly) sig_ids: IndexedSlice<SigId, SignatureIndex>, pub sig_ids: *mut SigId,
} }
impl VmCtx { impl Ctx {
pub fn new( pub fn new(
memories: *mut LocalMemory, memories: *mut LocalMemory,
tables: *mut LocalTable, tables: *mut LocalTable,
@ -46,14 +44,14 @@ impl VmCtx {
sig_ids: *mut SigId, sig_ids: *mut SigId,
) -> Self { ) -> Self {
Self { Self {
memories: IndexedSlice::new(memories), memories,
tables: IndexedSlice::new(tables), tables,
globals: IndexedSlice::new(globals), globals,
imported_memories: IndexedSlice::new(imported_memories), imported_memories,
imported_tables: IndexedSlice::new(imported_tables), imported_tables,
imported_globals: IndexedSlice::new(imported_globals), imported_globals,
imported_funcs: IndexedSlice::new(imported_funcs), imported_funcs,
sig_ids: IndexedSlice::new(sig_ids), sig_ids,
} }
} }
@ -100,7 +98,7 @@ pub enum Func {}
#[repr(C)] #[repr(C)]
pub struct ImportedFunc { pub struct ImportedFunc {
pub func: *const Func, pub func: *const Func,
pub vmctx: *mut VmCtx, pub vmctx: *mut Ctx,
} }
impl ImportedFunc { impl ImportedFunc {
@ -139,7 +137,7 @@ pub struct ImportedTable {
/// A pointer to the table definition. /// A pointer to the table definition.
pub table: *mut LocalTable, pub table: *mut LocalTable,
/// A pointer to the vmcontext that owns this table definition. /// A pointer to the vmcontext that owns this table definition.
pub vmctx: *mut VmCtx, pub vmctx: *mut Ctx,
} }
impl ImportedTable { impl ImportedTable {
@ -156,9 +154,9 @@ impl ImportedTable {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[repr(C)] #[repr(C)]
pub struct LocalMemory { pub struct LocalMemory {
/// Pointer to the bottom of linear memory. /// Pointer to the bottom of this linear memory.
pub base: *mut u8, pub base: *mut u8,
/// Current logical size of this linear memory in bytes. /// Current size of this linear memory in bytes.
pub size: usize, pub size: usize,
} }
@ -189,13 +187,19 @@ impl ImportedMemory {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[repr(C)] #[repr(C)]
pub struct LocalGlobal { pub struct LocalGlobal {
pub data: [u8; 8], pub data: u64,
} }
impl LocalGlobal { impl LocalGlobal {
pub fn offset_data() -> u8 { pub fn offset_data() -> u8 {
0 * (mem::size_of::<usize>() as u8) 0 * (mem::size_of::<usize>() as u8)
} }
pub fn null() -> Self {
Self {
data: 0,
}
}
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -217,12 +221,12 @@ pub struct SigId(u32);
/// Caller-checked anyfunc /// Caller-checked anyfunc
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[repr(C)] #[repr(C)]
pub struct CCAnyfunc { pub struct Anyfunc {
pub func_data: ImportedFunc, pub func_data: ImportedFunc,
pub sig_id: SigId, pub sig_id: SigId,
} }
impl CCAnyfunc { impl Anyfunc {
pub fn null() -> Self { pub fn null() -> Self {
Self { Self {
func_data: ImportedFunc { func_data: ImportedFunc {
@ -249,7 +253,7 @@ impl CCAnyfunc {
#[cfg(test)] #[cfg(test)]
mod vm_offset_tests { mod vm_offset_tests {
use super::{ use super::{
VmCtx, Ctx,
ImportedFunc, ImportedFunc,
LocalTable, LocalTable,
ImportedTable, ImportedTable,
@ -257,49 +261,49 @@ mod vm_offset_tests {
ImportedMemory, ImportedMemory,
LocalGlobal, LocalGlobal,
ImportedGlobal, ImportedGlobal,
CCAnyfunc, Anyfunc,
}; };
#[test] #[test]
fn vmctx() { fn vmctx() {
assert_eq!( assert_eq!(
VmCtx::offset_memories() as usize, Ctx::offset_memories() as usize,
offset_of!(VmCtx => memories).get_byte_offset(), offset_of!(Ctx => memories).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_tables() as usize, Ctx::offset_tables() as usize,
offset_of!(VmCtx => tables).get_byte_offset(), offset_of!(Ctx => tables).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_globals() as usize, Ctx::offset_globals() as usize,
offset_of!(VmCtx => globals).get_byte_offset(), offset_of!(Ctx => globals).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_imported_memories() as usize, Ctx::offset_imported_memories() as usize,
offset_of!(VmCtx => imported_memories).get_byte_offset(), offset_of!(Ctx => imported_memories).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_imported_tables() as usize, Ctx::offset_imported_tables() as usize,
offset_of!(VmCtx => imported_tables).get_byte_offset(), offset_of!(Ctx => imported_tables).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_imported_globals() as usize, Ctx::offset_imported_globals() as usize,
offset_of!(VmCtx => imported_globals).get_byte_offset(), offset_of!(Ctx => imported_globals).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_imported_funcs() as usize, Ctx::offset_imported_funcs() as usize,
offset_of!(VmCtx => imported_funcs).get_byte_offset(), offset_of!(Ctx => imported_funcs).get_byte_offset(),
); );
assert_eq!( assert_eq!(
VmCtx::offset_sig_ids() as usize, Ctx::offset_sig_ids() as usize,
offset_of!(VmCtx => sig_ids).get_byte_offset(), offset_of!(Ctx => sig_ids).get_byte_offset(),
); );
} }
@ -382,18 +386,18 @@ mod vm_offset_tests {
#[test] #[test]
fn cc_anyfunc() { fn cc_anyfunc() {
assert_eq!( assert_eq!(
CCAnyfunc::offset_func() as usize, Anyfunc::offset_func() as usize,
offset_of!(CCAnyfunc => func_data: ImportedFunc => func).get_byte_offset(), offset_of!(Anyfunc => func_data: ImportedFunc => func).get_byte_offset(),
); );
assert_eq!( assert_eq!(
CCAnyfunc::offset_vmctx() as usize, Anyfunc::offset_vmctx() as usize,
offset_of!(CCAnyfunc => func_data: ImportedFunc => vmctx).get_byte_offset(), offset_of!(Anyfunc => func_data: ImportedFunc => vmctx).get_byte_offset(),
); );
assert_eq!( assert_eq!(
CCAnyfunc::offset_sig_id() as usize, Anyfunc::offset_sig_id() as usize,
offset_of!(CCAnyfunc => sig_id).get_byte_offset(), offset_of!(Anyfunc => sig_id).get_byte_offset(),
); );
} }
} }

View File

@ -30,7 +30,7 @@ use super::memory::LinearMemory;
use super::module::{Export, ImportableExportable, Module}; use super::module::{Export, ImportableExportable, Module};
use super::relocation::{Reloc, RelocSink, RelocationType}; use super::relocation::{Reloc, RelocSink, RelocationType};
use super::vm; use super::vm;
use super::backing::{Backing, ImportsBacking}; use super::backing::{LocalBacking, ImportsBacking};
type TablesSlice = UncheckedSlice<BoundedSlice<usize>>; type TablesSlice = UncheckedSlice<BoundedSlice<usize>>;
// TODO: this should be `type MemoriesSlice = UncheckedSlice<UncheckedSlice<u8>>;`, but that crashes for some reason. // TODO: this should be `type MemoriesSlice = UncheckedSlice<UncheckedSlice<u8>>;`, but that crashes for some reason.
@ -96,13 +96,14 @@ pub enum InstanceABI {
#[derive(Debug)] #[derive(Debug)]
#[repr(C)] #[repr(C)]
pub struct Instance { pub struct Instance {
pub vmctx: vm::Ctx,
// C-like pointers to data (heaps, globals, tables) // C-like pointers to data (heaps, globals, tables)
pub data_pointers: DataPointers, pub data_pointers: DataPointers,
/// Webassembly functions /// Webassembly functions
finalized_funcs: Box<[*const vm::Func]>, finalized_funcs: Box<[*const vm::Func]>,
backing: Backing, backing: LocalBacking,
imports: ImportsBacking, imports: ImportsBacking,
@ -113,6 +114,14 @@ pub struct Instance {
pub emscripten_data: Option<EmscriptenData>, pub emscripten_data: Option<EmscriptenData>,
} }
impl Instance {
/// Shortcut for converting from a `vm::Ctx` pointer to a reference to the `Instance`.
/// This works because of the `vm::Ctx` is the first field of the `Instance`.
pub unsafe fn from_vmctx<'a>(ctx: *mut vm::Ctx) -> &'a mut Instance {
&mut *(ctx as *mut Instance)
}
}
/// Contains pointers to data (heaps, globals, tables) needed /// Contains pointers to data (heaps, globals, tables) needed
/// by Cranelift. /// by Cranelift.
/// NOTE: Rearranging the fields will break the memory arrangement model /// NOTE: Rearranging the fields will break the memory arrangement model

View File

@ -1,54 +0,0 @@
use super::vm::{CCAnyfunc, LocalTable};
use cranelift_wasm::{
Table as ClifTable,
TableElementType,
};
pub struct TableScheme {
table: ClifTable,
info: TableInfo,
}
impl TableScheme {
pub fn from_table(table: ClifTable) -> Self {
Self {
table,
info: match table.ty {
TableElementType::Func => TableInfo::CallerChecks,
TableElementType::Val(_) => unimplemented!(),
},
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum TableInfo {
CallerChecks,
}
#[derive(Debug)]
pub struct TableBacking {
pub elements: Box<[CCAnyfunc]>,
pub max: Option<u32>,
}
impl TableBacking {
pub fn new(scheme: &TableScheme) -> Self {
match (scheme.table.ty, scheme.info) {
(TableElementType::Func, TableInfo::CallerChecks) => {
TableBacking {
elements: vec![CCAnyfunc::null(); scheme.table.minimum as usize].into(),
max: scheme.table.maximum,
}
},
(TableElementType::Val(_), _) => unimplemented!(),
}
}
pub fn into_vm_table(&mut self) -> LocalTable {
LocalTable {
base: self.elements.as_mut_ptr() as *mut u8,
current_elements: self.elements.len(),
}
}
}