Mirror of https://github.com/fluencelabs/wasmer, synced 2025-03-15 07:40:50 +00:00.

commit c1f91c6572 (parent 231b1c20bf)

Remove memory_base and memory_bound from InternalCtx.

This commit drops the cached memory_base/memory_bound fields from InternalCtx and makes every consumer (the LLVM backend's intrinsics and stackmaps, state capture/restore, the grow vmcalls, and the singlepass x64 code generator) read the base and bound of memory 0 through the memories array instead.

```diff
@@ -215,8 +215,6 @@ impl Intrinsics {
         let sigindex_ty = i32_ty;
         let rt_intrinsics_ty = i8_ty;
         let stack_lower_bound_ty = i8_ty;
-        let memory_base_ty = i8_ty;
-        let memory_bound_ty = i8_ty;
         let internals_ty = i64_ty;
         let interrupt_signal_mem_ty = i8_ty;
         let local_function_ty = i8_ptr_ty;
```

```diff
@@ -268,12 +266,6 @@ impl Intrinsics {
             stack_lower_bound_ty
                 .ptr_type(AddressSpace::Generic)
                 .as_basic_type_enum(),
-            memory_base_ty
-                .ptr_type(AddressSpace::Generic)
-                .as_basic_type_enum(),
-            memory_bound_ty
-                .ptr_type(AddressSpace::Generic)
-                .as_basic_type_enum(),
             internals_ty
                 .ptr_type(AddressSpace::Generic)
                 .as_basic_type_enum(),
```

```diff
@@ -31,10 +31,6 @@ pub enum ValueSemantic {
     WasmStack(usize),
     Ctx,
     SignalMem,
-    PointerToMemoryBase,
-    PointerToMemoryBound, // 64-bit
-    MemoryBase,
-    MemoryBound, // 64-bit
     PointerToGlobal(usize),
     Global(usize),
     PointerToTableBase,
```

```diff
@@ -128,18 +124,6 @@ impl StackmapEntry {
             ValueSemantic::SignalMem => {
                 MachineValue::VmctxDeref(vec![Ctx::offset_interrupt_signal_mem() as usize, 0])
             }
-            ValueSemantic::PointerToMemoryBase => {
-                MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize])
-            }
-            ValueSemantic::PointerToMemoryBound => {
-                MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize])
-            }
-            ValueSemantic::MemoryBase => {
-                MachineValue::VmctxDeref(vec![Ctx::offset_memory_base() as usize, 0])
-            }
-            ValueSemantic::MemoryBound => {
-                MachineValue::VmctxDeref(vec![Ctx::offset_memory_bound() as usize, 0])
-            }
             ValueSemantic::PointerToGlobal(idx) => {
                 MachineValue::VmctxDeref(deref_global(module_info, idx, false))
             }
```

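Each arm above encodes a value as a `MachineValue::VmctxDeref` path: a list of byte offsets walked from the vmctx pointer, where every entry is added to the current address and then dereferenced. So `vec![off]` yields the field itself (the `PointerTo*` variants), while `vec![off, 0]` additionally dereferences that field (the plain `MemoryBase`/`MemoryBound` variants being removed here). A minimal sketch of that walk, assuming each step is a pointer-sized load; `walk_deref_path` is illustrative, not wasmer's actual resolver:

```rust
/// Illustrative only: walk a VmctxDeref-style offset path on a 64-bit target.
/// `&[off]` reads the field at `ctx + off`; `&[off, 0]` additionally
/// dereferences that field, matching the arms in the hunk above.
unsafe fn walk_deref_path(ctx: *const u8, path: &[usize]) -> usize {
    let mut value = ctx as usize;
    for &off in path {
        // Add the offset, then load a pointer-sized word at that address.
        value = *((value + off) as *const usize);
    }
    value
}
```
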
```diff
@@ -1,6 +1,8 @@
 use std::collections::BTreeMap;
 use std::ops::Bound::{Included, Unbounded};
 
+use crate::units::Bytes;
+
 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
 pub struct RegisterIndex(pub usize);
```

```diff
@@ -705,21 +707,22 @@ pub mod x64 {
             known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);
 
         if let Some(ref memory) = image.memory {
-            assert!(vmctx.internal.memory_bound <= memory.len());
-
-            if vmctx.internal.memory_bound < memory.len() {
-                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
-                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
-                grow(
-                    vmctx,
-                    0,
-                    (memory.len() - vmctx.internal.memory_bound) / 65536,
-                );
-                assert_eq!(vmctx.internal.memory_bound, memory.len());
-            }
-
-            std::slice::from_raw_parts_mut(vmctx.internal.memory_base, vmctx.internal.memory_bound)
-                .copy_from_slice(memory);
+            let len_in_bytes = Bytes(memory.len());
+            assert!(vmctx.memory(0).size().bytes() <= len_in_bytes);
+
+            if vmctx.memory(0).size().bytes() < len_in_bytes {
+                vmctx
+                    .memory(0)
+                    .grow((len_in_bytes - vmctx.memory(0).size().bytes()).into())
+                    .unwrap();
+                assert!(vmctx.memory(0).size().bytes() >= len_in_bytes);
+            }
+
+            vmctx
+                .memory(0)
+                .view::<u8>()
+                .iter()
+                .zip(memory)
+                .for_each(|(dst, src)| dst.set(*src));
         }
 
         let globals_len = (*vmctx.module).info.globals.len();
```

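The restore path now goes through the public `Memory` API rather than poking the removed raw fields: sizes are compared in `Bytes`, growth goes through `Memory::grow`, and bytes are written through a `view::<u8>()` of cells. A rough usage sketch of that API shape, inferred from the calls visible in this hunk (the exact import paths are an assumption; `memory` stands in for `vmctx.memory(0)`):

```rust
// Sketch inferred from the hunk above, not a verbatim extract.
use wasmer_runtime_core::memory::Memory;
use wasmer_runtime_core::units::{Bytes, Pages};

fn restore_image_bytes(memory: &Memory, image: &[u8]) {
    let want = Bytes(image.len());
    // The image may never be larger than what the memory can already hold
    // plus growth; grow first if the instance's memory is smaller.
    assert!(memory.size().bytes() <= want);
    if memory.size().bytes() < want {
        // Bytes - Bytes yields Bytes; .into() converts to Pages, as in the diff.
        let delta: Pages = (want - memory.size().bytes()).into();
        memory.grow(delta).unwrap();
    }
    // Each view cell is written with set(); no raw base pointer involved.
    memory
        .view::<u8>()
        .iter()
        .zip(image)
        .for_each(|(dst, src)| dst.set(*src));
}
```
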
```diff
@@ -745,34 +748,34 @@ pub mod x64 {
         vmctx: &mut Ctx,
         execution_state: ExecutionStateImage,
     ) -> InstanceImage {
-        unsafe {
-            let memory = if vmctx.internal.memory_base.is_null() {
-                None
-            } else {
-                Some(
-                    std::slice::from_raw_parts(
-                        vmctx.internal.memory_base,
-                        vmctx.internal.memory_bound,
-                    )
-                    .to_vec(),
-                )
-            };
+        let info = unsafe { &(*vmctx.module).info };
+        let memory = if info.memories.len() == 0 && info.imported_memories.len() == 0 {
+            None
+        } else {
+            Some(
+                vmctx
+                    .memory(0)
+                    .view::<u8>()
+                    .iter()
+                    .map(|c| c.get())
+                    .collect(),
+            )
+        };
 
-            // FIXME: Imported globals
-            let globals_len = (*vmctx.module).info.globals.len();
-            let globals: Vec<u128> = (0..globals_len)
-                .map(|i| {
-                    (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
-                        .get()
-                        .to_u128()
-                })
-                .collect();
+        // FIXME: Imported globals
+        let globals_len = info.globals.len();
+        let globals: Vec<u128> = (0..globals_len)
+            .map(|i| unsafe {
+                (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
+                    .get()
+                    .to_u128()
+            })
+            .collect();
 
-            InstanceImage {
-                memory: memory,
-                globals: globals,
-                execution_state: execution_state,
-            }
-        }
+        InstanceImage {
+            memory: memory,
+            globals: globals,
+            execution_state: execution_state,
+        }
     }
```

```diff
@@ -114,9 +114,6 @@ pub struct InternalCtx {
 
     pub stack_lower_bound: *mut u8,
 
-    pub memory_base: *mut u8,
-    pub memory_bound: usize,
-
     pub internals: *mut [u64; INTERNALS_SIZE], // TODO: Make this dynamic?
 
     pub interrupt_signal_mem: *mut u8,
```

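With the cached fields gone, code that needs memory 0's current base and bound reads them from the `LocalMemory` record behind the `memories` (or `imported_memories`) array, which the runtime keeps authoritative across grows, so nothing can go stale. A hedged sketch of that access path, with field and pointer types assumed from the other hunks in this commit:

```rust
// Illustrative only: fetch memory 0's base/bound through the memories array
// rather than the removed InternalCtx fields. Assumes `memories` points to an
// array of `*mut LocalMemory` and that at least one local memory exists.
unsafe fn memory0_base_bound(internal: &InternalCtx) -> (*mut u8, usize) {
    let local_memory = *internal.memories; // entry 0 of the array
    ((*local_memory).base, (*local_memory).bound)
}
```
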
```diff
@@ -245,16 +242,6 @@ impl Ctx {
         import_backing: &mut ImportBacking,
         module: &ModuleInner,
     ) -> Self {
-        let (mem_base, mem_bound): (*mut u8, usize) =
-            if module.info.memories.len() == 0 && module.info.imported_memories.len() == 0 {
-                (::std::ptr::null_mut(), 0)
-            } else {
-                let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
-                    LocalOrImport::Local(index) => local_backing.vm_memories[index],
-                    LocalOrImport::Import(index) => import_backing.vm_memories[index],
-                };
-                ((*mem).base, (*mem).bound)
-            };
         Self {
             internal: InternalCtx {
                 memories: local_backing.vm_memories.as_mut_ptr(),
```

```diff
@@ -272,9 +259,6 @@ impl Ctx {
 
                 stack_lower_bound: ::std::ptr::null_mut(),
 
-                memory_base: mem_base,
-                memory_bound: mem_bound,
-
                 internals: &mut local_backing.internals.0,
 
                 interrupt_signal_mem: get_interrupt_signal_mem(),
```

```diff
@@ -298,16 +282,6 @@ impl Ctx {
         data: *mut c_void,
         data_finalizer: fn(*mut c_void),
     ) -> Self {
-        let (mem_base, mem_bound): (*mut u8, usize) =
-            if module.info.memories.len() == 0 && module.info.imported_memories.len() == 0 {
-                (::std::ptr::null_mut(), 0)
-            } else {
-                let mem = match MemoryIndex::new(0).local_or_import(&module.info) {
-                    LocalOrImport::Local(index) => local_backing.vm_memories[index],
-                    LocalOrImport::Import(index) => import_backing.vm_memories[index],
-                };
-                ((*mem).base, (*mem).bound)
-            };
         Self {
             internal: InternalCtx {
                 memories: local_backing.vm_memories.as_mut_ptr(),
```

```diff
@@ -325,9 +299,6 @@ impl Ctx {
 
                 stack_lower_bound: ::std::ptr::null_mut(),
 
-                memory_base: mem_base,
-                memory_bound: mem_bound,
-
                 internals: &mut local_backing.internals.0,
 
                 interrupt_signal_mem: get_interrupt_signal_mem(),
```

```diff
@@ -477,24 +448,16 @@ impl Ctx {
         9 * (mem::size_of::<usize>() as u8)
     }
 
-    pub fn offset_memory_base() -> u8 {
+    pub fn offset_internals() -> u8 {
         10 * (mem::size_of::<usize>() as u8)
     }
 
-    pub fn offset_memory_bound() -> u8 {
+    pub fn offset_interrupt_signal_mem() -> u8 {
         11 * (mem::size_of::<usize>() as u8)
     }
 
-    pub fn offset_internals() -> u8 {
-        12 * (mem::size_of::<usize>() as u8)
-    }
-
-    pub fn offset_interrupt_signal_mem() -> u8 {
-        13 * (mem::size_of::<usize>() as u8)
-    }
-
     pub fn offset_local_functions() -> u8 {
-        14 * (mem::size_of::<usize>() as u8)
+        12 * (mem::size_of::<usize>() as u8)
     }
 }
```

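Each `offset_*` helper is just the field's slot index in `InternalCtx` times the pointer width, so deleting two pointer-sized fields shifts every later slot down by two: `internals` 12 → 10, `interrupt_signal_mem` 13 → 11, `local_functions` 14 → 12. A toy demonstration of that arithmetic with illustrative structs (not wasmer's, which has more fields):

```rust
// Removing two pointer-sized fields from a #[repr(C)] struct moves every
// later field down by 2 * size_of::<usize>() bytes; the slot constants above
// change by exactly that amount.
#[repr(C)]
struct Before {
    stack_lower_bound: *mut u8, // slot 9 in the real InternalCtx
    memory_base: *mut u8,       // slot 10 (removed by this commit)
    memory_bound: usize,        // slot 11 (removed by this commit)
    internals: *mut u64,        // slot 12, which becomes slot 10
}

#[repr(C)]
struct After {
    stack_lower_bound: *mut u8,
    internals: *mut u64,
}

fn main() {
    let shrink = std::mem::size_of::<Before>() - std::mem::size_of::<After>();
    assert_eq!(shrink, 2 * std::mem::size_of::<usize>());
}
```
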
```diff
@@ -711,16 +674,6 @@ mod vm_offset_tests {
             offset_of!(InternalCtx => stack_lower_bound).get_byte_offset(),
         );
 
-        assert_eq!(
-            Ctx::offset_memory_base() as usize,
-            offset_of!(InternalCtx => memory_base).get_byte_offset(),
-        );
-
-        assert_eq!(
-            Ctx::offset_memory_bound() as usize,
-            offset_of!(InternalCtx => memory_bound).get_byte_offset(),
-        );
-
         assert_eq!(
             Ctx::offset_internals() as usize,
             offset_of!(InternalCtx => internals).get_byte_offset(),
```

```diff
@@ -25,9 +25,6 @@ pub unsafe extern "C" fn local_static_memory_grow(
         Err(_) => -1,
     };
 
-    ctx.internal.memory_base = (*local_memory).base;
-    ctx.internal.memory_bound = (*local_memory).bound;
-
     ret
 }
```

```diff
@@ -54,9 +51,6 @@ pub unsafe extern "C" fn local_dynamic_memory_grow(
         Err(_) => -1,
     };
 
-    ctx.internal.memory_base = (*local_memory).base;
-    ctx.internal.memory_bound = (*local_memory).bound;
-
     ret
 }
```

```diff
@@ -90,9 +84,6 @@ pub unsafe extern "C" fn imported_static_memory_grow(
         Err(_) => -1,
     };
 
-    ctx.internal.memory_base = (*local_memory).base;
-    ctx.internal.memory_bound = (*local_memory).bound;
-
     ret
 }
```

```diff
@@ -122,9 +113,6 @@ pub unsafe extern "C" fn imported_dynamic_memory_grow(
        Err(_) => -1,
     };
 
-    ctx.internal.memory_base = (*local_memory).base;
-    ctx.internal.memory_bound = (*local_memory).bound;
-
     ret
 }
```

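All four grow vmcalls lose the same two trailing stores: after a successful grow they no longer refresh a per-Ctx cache, because the `LocalMemory` they already update is the single source of truth. Schematically, each function now has this shape (the hunk headers truncate the real signatures, so `Pages` and `grow_memory_impl` are stand-ins, not wasmer API):

```rust
// Shape of each *_memory_grow vmcall after this commit (stand-in names).
pub unsafe extern "C" fn example_memory_grow(ctx: &mut Ctx, delta: Pages) -> i32 {
    // Grow the LocalMemory in place; return the old size in pages, or -1.
    let ret = match grow_memory_impl(ctx, delta) { // hypothetical helper
        Ok(old_size_in_pages) => old_size_in_pages.0 as i32,
        Err(_) => -1,
    };

    // Previously: ctx.internal.memory_base / memory_bound were re-cached here.
    ret
}
```
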
```diff
@@ -33,6 +33,7 @@ use wasmer_runtime_core::{
         TableIndex, Type,
     },
     vm::{self, LocalGlobal, LocalTable, INTERNALS_SIZE},
+    vmcalls,
 };
 use wasmparser::{Operator, Type as WpType, TypeOrFuncType as WpTypeOrFuncType};
```

```diff
@@ -1489,25 +1490,50 @@ impl X64FunctionCode {
         let tmp_base = m.acquire_temp_gpr().unwrap();
         let tmp_bound = m.acquire_temp_gpr().unwrap();
 
-        // Load base into temporary register.
-        a.emit_mov(
-            Size::S64,
-            Location::Memory(
-                Machine::get_vmctx_reg(),
-                vm::Ctx::offset_memory_base() as i32,
-            ),
-            Location::GPR(tmp_base),
-        );
-
-        if need_check {
-            a.emit_mov(
-                Size::S64,
-                Location::Memory(
-                    Machine::get_vmctx_reg(),
-                    vm::Ctx::offset_memory_bound() as i32,
-                ),
-                Location::GPR(tmp_bound),
-            );
+        // Load pointer to array of memories.
+        match MemoryIndex::new(0).local_or_import(module_info) {
+            LocalOrImport::Local(_local_mem_index) => {
+                a.emit_mov(
+                    Size::S64,
+                    Location::Memory(Machine::get_vmctx_reg(), vm::Ctx::offset_memories() as i32),
+                    Location::GPR(tmp_base),
+                );
+            }
+            LocalOrImport::Import(_import_mem_index) => {
+                a.emit_mov(
+                    Size::S64,
+                    Location::Memory(
+                        Machine::get_vmctx_reg(),
+                        vm::Ctx::offset_imported_memories() as i32,
+                    ),
+                    Location::GPR(tmp_base),
+                );
+            }
+        };
+
+        // Load pointer to first memory.
+        a.emit_mov(
+            Size::S64,
+            Location::Memory(tmp_base, 0),
+            Location::GPR(tmp_base),
+        );
+
+        // The first member is the base, the second member is the bound.
+        // Load bound pointer before clobbering tmp_base.
+        if need_check {
+            a.emit_mov(
+                Size::S64,
+                Location::Memory(tmp_base, 8),
+                Location::GPR(tmp_bound),
+            );
+        }
+        a.emit_mov(
+            Size::S64,
+            Location::Memory(tmp_base, 0),
+            Location::GPR(tmp_base),
+        );
+
+        if need_check {
             // Adds base to bound so `tmp_bound` now holds the end of linear memory.
             a.emit_add(Size::S64, Location::GPR(tmp_base), Location::GPR(tmp_bound));
             a.emit_mov(Size::S32, addr, Location::GPR(tmp_addr));
```

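The two fixed displacements in the new code, `Location::Memory(tmp_base, 0)` for the base and `Location::Memory(tmp_base, 8)` for the bound, bake the first two fields of `LocalMemory` into the generated machine code, as the comment in the hunk says. That only holds under a C layout along these lines (a sketch; the real definition lives in wasmer's `vm` module and may carry further fields after these two):

```rust
// Layout assumption behind the hard-coded offsets 0 and 8 above (64-bit target).
#[repr(C)]
pub struct LocalMemorySketch {
    pub base: *mut u8, // byte offset 0: start of linear memory
    pub bound: usize,  // byte offset 8: size of linear memory in bytes
}
```
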
```diff
@@ -4055,17 +4081,29 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
             Operator::Nop => {}
             Operator::MemorySize { reserved } => {
                 let memory_index = MemoryIndex::new(reserved as usize);
+                let func_value: *const i8 = match memory_index.local_or_import(module_info) {
+                    LocalOrImport::Local(local_mem_index) => {
+                        let mem_desc = &module_info.memories[local_mem_index];
+                        match mem_desc.memory_type() {
+                            MemoryType::Dynamic => vmcalls::local_dynamic_memory_size as _,
+                            MemoryType::Static | MemoryType::SharedStatic => {
+                                vmcalls::local_static_memory_size as _
+                            }
+                        }
+                    }
+                    LocalOrImport::Import(import_mem_index) => {
+                        let mem_desc = &module_info.imported_memories[import_mem_index].1;
+                        match mem_desc.memory_type() {
+                            MemoryType::Dynamic => vmcalls::imported_dynamic_memory_size as _,
+                            MemoryType::Static | MemoryType::SharedStatic => {
+                                vmcalls::imported_static_memory_size as _
+                            }
+                        }
+                    }
+                };
                 a.emit_mov(
                     Size::S64,
-                    Location::Memory(
-                        Machine::get_vmctx_reg(),
-                        vm::Ctx::offset_intrinsics() as i32,
-                    ),
-                    Location::GPR(GPR::RAX),
-                );
-                a.emit_mov(
-                    Size::S64,
-                    Location::Memory(GPR::RAX, vm::Intrinsics::offset_memory_size() as i32),
+                    Location::Imm64(func_value as _),
                     Location::GPR(GPR::RAX),
                 );
                 Self::emit_call_sysv(
```

```diff
@@ -4087,26 +4125,36 @@ impl FunctionCodeGenerator<CodegenError> for X64FunctionCode {
             }
             Operator::MemoryGrow { reserved } => {
                 let memory_index = MemoryIndex::new(reserved as usize);
+                let func_value: *const i8 = match memory_index.local_or_import(module_info) {
+                    LocalOrImport::Local(local_mem_index) => {
+                        let mem_desc = &module_info.memories[local_mem_index];
+                        match mem_desc.memory_type() {
+                            MemoryType::Dynamic => vmcalls::local_dynamic_memory_grow as _,
+                            MemoryType::Static | MemoryType::SharedStatic => {
+                                vmcalls::local_static_memory_grow as _
+                            }
+                        }
+                    }
+                    LocalOrImport::Import(import_mem_index) => {
+                        let mem_desc = &module_info.imported_memories[import_mem_index].1;
+                        match mem_desc.memory_type() {
+                            MemoryType::Dynamic => vmcalls::imported_dynamic_memory_grow as _,
+                            MemoryType::Static | MemoryType::SharedStatic => {
+                                vmcalls::imported_static_memory_grow as _
+                            }
+                        }
+                    }
+                };
                 let param_pages = self.value_stack.pop().unwrap();
 
                 self.machine.release_locations_only_regs(&[param_pages]);
 
-                a.emit_mov(
-                    Size::S64,
-                    Location::Memory(
-                        Machine::get_vmctx_reg(),
-                        vm::Ctx::offset_intrinsics() as i32,
-                    ),
-                    Location::GPR(GPR::RAX),
-                );
-                a.emit_mov(
-                    Size::S64,
-                    Location::Memory(GPR::RAX, vm::Intrinsics::offset_memory_grow() as i32),
-                    Location::GPR(GPR::RAX),
-                );
-
                 self.machine.release_locations_only_osr_state(1);
 
+                a.emit_mov(
+                    Size::S64,
+                    Location::Imm64(func_value as _),
+                    Location::GPR(GPR::RAX),
+                );
                 Self::emit_call_sysv(
                     a,
                     &mut self.machine,
```

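In both operators the callee is now chosen at compile time from the memory's locality and `MemoryType`, so the emitted sequence collapses from two dependent loads through the intrinsics table into a single `mov rax, imm64` before the SysV call. The selection `match` is duplicated almost verbatim between `MemorySize` and `MemoryGrow`; it could be factored into a helper along these lines (an illustrative refactor using only names from the diff, not something this commit does):

```rust
// Sketch of the compile-time callee selection both operators share.
fn select_memory_grow_vmcall(
    module_info: &ModuleInfo,
    memory_index: MemoryIndex,
) -> *const i8 {
    match memory_index.local_or_import(module_info) {
        LocalOrImport::Local(idx) => match module_info.memories[idx].memory_type() {
            MemoryType::Dynamic => vmcalls::local_dynamic_memory_grow as _,
            MemoryType::Static | MemoryType::SharedStatic => {
                vmcalls::local_static_memory_grow as _
            }
        },
        LocalOrImport::Import(idx) => {
            match module_info.imported_memories[idx].1.memory_type() {
                MemoryType::Dynamic => vmcalls::imported_dynamic_memory_grow as _,
                MemoryType::Static | MemoryType::SharedStatic => {
                    vmcalls::imported_static_memory_grow as _
                }
            }
        }
    }
}
```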