Mirror of https://github.com/fluencelabs/wasmer
Allow a range of instruction offsets to be used in ip lookup.
parent efb5277d1d
commit 0a7f95ee06
@@ -12,13 +12,13 @@ use libc::{
 };
 use std::{
     any::Any,
+    collections::BTreeMap,
     ffi::{c_void, CString},
     mem,
     ops::Deref,
     ptr::{self, NonNull},
     slice, str,
     sync::{Arc, Once},
-    collections::BTreeMap,
 };
 use wasmer_runtime_core::{
     backend::{
@@ -347,16 +347,31 @@ impl LLVMBackend {
             map_records.insert(r.patchpoint_id as usize, r);
         }
 
-        for (i, entry) in stackmaps.entries.iter().enumerate() {
-            if let Some(map_record) = map_records.get(&i) {
-                assert_eq!(i, map_record.patchpoint_id as usize);
-                let addr = local_func_id_to_addr[entry.local_function_id];
-                let size_record = *addr_to_size_record.get(&addr).expect("size_record not found");
-                entry.populate_msm(
+        for ((start_id, start_entry), (end_id, end_entry)) in stackmaps
+            .entries
+            .iter()
+            .enumerate()
+            .step_by(2)
+            .zip(stackmaps.entries.iter().enumerate().skip(1).step_by(2))
+        {
+            if let Some(map_record) = map_records.get(&start_id) {
+                assert_eq!(start_id, map_record.patchpoint_id as usize);
+                assert!(start_entry.is_start);
+                assert!(!end_entry.is_start);
+
+                let end_record = map_records.get(&end_id);
+
+                let addr = local_func_id_to_addr[start_entry.local_function_id];
+                let size_record = *addr_to_size_record
+                    .get(&addr)
+                    .expect("size_record not found");
+
+                start_entry.populate_msm(
                     code_ptr as usize,
                     &map,
                     size_record,
                     map_record,
+                    end_record.map(|x| (end_entry, *x)),
                     &mut msm,
                 );
             } else {
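Note: the loop above consumes the stackmap entries two at a time. Entries at even indices are expected to be the start records pushed by emit_stack_map (is_start set), and the following odd-index entries the matching end records pushed by finalize_opcode_stack_map. A minimal standalone sketch of the same step_by/zip pairing, using a hypothetical Entry type in place of the real StackmapEntry:

    #[derive(Debug)]
    struct Entry {
        opcode_offset: usize,
        is_start: bool,
    }

    fn main() {
        // Entries are assumed to be pushed in strict start/end alternation,
        // which is the invariant the asserts in the diff above check.
        let entries = vec![
            Entry { opcode_offset: 0, is_start: true },
            Entry { opcode_offset: 0, is_start: false },
            Entry { opcode_offset: 7, is_start: true },
            Entry { opcode_offset: 7, is_start: false },
        ];

        for ((start_id, start), (end_id, end)) in entries
            .iter()
            .enumerate()
            .step_by(2) // even indices: start records
            .zip(entries.iter().enumerate().skip(1).step_by(2)) // odd indices: end records
        {
            assert!(start.is_start && !end.is_start);
            assert_eq!(start.opcode_offset, end.opcode_offset);
            println!("pair: start #{} / end #{}", start_id, end_id);
        }
    }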
@@ -342,6 +342,38 @@ fn emit_stack_map(
         stack_count: state.stack.len(),
         opcode_offset,
         value_semantics,
+        is_start: true,
+    });
+}
+
+fn finalize_opcode_stack_map(
+    intrinsics: &Intrinsics,
+    builder: &Builder,
+    local_function_id: usize,
+    target: &mut StackmapRegistry,
+    kind: StackmapEntryKind,
+    opcode_offset: usize,
+) {
+    let stackmap_id = target.entries.len();
+    builder.build_call(
+        intrinsics.experimental_stackmap,
+        &[
+            intrinsics
+                .i64_ty
+                .const_int(stackmap_id as u64, false)
+                .as_basic_value_enum(),
+            intrinsics.i32_ty.const_int(0, false).as_basic_value_enum(),
+        ],
+        "opcode_stack_map_end",
+    );
+    target.entries.push(StackmapEntry {
+        kind,
+        local_function_id,
+        local_count: 0,
+        stack_count: 0,
+        opcode_offset,
+        value_semantics: vec![],
+        is_start: false,
     });
 }
 
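With finalize_opcode_stack_map added above, each instrumented point now emits two llvm.experimental.stackmap calls: the existing start record from emit_stack_map and an end record tagged is_start: false. In the compiled stack map section that yields a pair of instruction offsets per point, which the backend turns into a byte range inside the function. A sketch of that computation, with illustrative names mirroring what populate_msm does with the paired records:

    // Illustrative sketch only; mirrors how populate_msm derives target_offset and
    // end_offset from the paired start/end StkMapRecords.
    fn opcode_offset_range(
        func_base_addr: usize,           // function_address - code_addr
        start_instruction_offset: usize, // from the start record
        end_instruction_offset: usize,   // from the matching end record
    ) -> (usize, usize) {
        let target_offset = func_base_addr + start_instruction_offset;
        let end_offset = func_base_addr + end_instruction_offset;
        assert!(end_offset >= target_offset);
        (target_offset, end_offset)
    }

    fn main() {
        // e.g. a point whose start/end stackmap records are 0x40 and 0x55 bytes
        // into a function placed 0x1000 bytes after the code base:
        assert_eq!(opcode_offset_range(0x1000, 0x40, 0x55), (0x1040, 0x1055));
    }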
@@ -448,7 +480,6 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
 
         self.ctx = Some(ctx);
 
-
         {
             let mut state = &mut self.state;
             let builder = self.builder.as_ref().unwrap();
@@ -465,6 +496,14 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                 &state,
                 ::std::usize::MAX,
             );
+            finalize_opcode_stack_map(
+                &intrinsics,
+                &builder,
+                self.index,
+                &mut *stackmaps,
+                StackmapEntryKind::FunctionHeader,
+                ::std::usize::MAX,
+            );
         }
 
         Ok(())
@@ -574,7 +613,19 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                         &self.locals,
                         state,
                         offset,
-                    )
+                    );
+                    let signal_mem = ctx.signal_mem();
+                    let iv = builder
+                        .build_store(signal_mem, context.i8_type().const_int(0 as u64, false));
+                    iv.set_volatile(true);
+                    finalize_opcode_stack_map(
+                        intrinsics,
+                        builder,
+                        self.index,
+                        &mut *stackmaps,
+                        StackmapEntryKind::Loop,
+                        offset,
+                    );
                 }
 
                 state.push_loop(loop_body, loop_next, phis);
@@ -850,10 +901,14 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                         state,
                         offset,
                     );
-                    builder.build_call(
-                        intrinsics.trap,
-                        &[],
-                        "trap",
+                    builder.build_call(intrinsics.trap, &[], "trap");
+                    finalize_opcode_stack_map(
+                        intrinsics,
+                        builder,
+                        self.index,
+                        &mut *stackmaps,
+                        StackmapEntryKind::Trappable,
+                        offset + 1,
                     );
                 }
 
@@ -1008,6 +1063,17 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                     )
                 }
                 let call_site = builder.build_call(func_ptr, &params, &state.var_name());
+                if let Some(offset) = opcode_offset {
+                    let mut stackmaps = self.stackmaps.borrow_mut();
+                    finalize_opcode_stack_map(
+                        intrinsics,
+                        builder,
+                        self.index,
+                        &mut *stackmaps,
+                        StackmapEntryKind::Call,
+                        offset,
+                    )
+                }
 
                 if let Some(basic_value) = call_site.try_as_basic_value().left() {
                     match func_sig.returns().len() {
@@ -1186,6 +1252,17 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
                     )
                 }
                 let call_site = builder.build_call(typed_func_ptr, &args, "indirect_call");
+                if let Some(offset) = opcode_offset {
+                    let mut stackmaps = self.stackmaps.borrow_mut();
+                    finalize_opcode_stack_map(
+                        intrinsics,
+                        builder,
+                        self.index,
+                        &mut *stackmaps,
+                        StackmapEntryKind::Call,
+                        offset,
+                    )
+                }
 
                 match wasmer_fn_sig.returns() {
                     [] => {}
@@ -2679,8 +2756,12 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>
 
         let stackmaps = self.stackmaps.borrow();
 
-        let (backend, cache_gen) =
-            LLVMBackend::new(self.module, self.intrinsics.take().unwrap(), &*stackmaps, module_info);
+        let (backend, cache_gen) = LLVMBackend::new(
+            self.module,
+            self.intrinsics.take().unwrap(),
+            &*stackmaps,
+            module_info,
+        );
         Ok((backend, Box::new(cache_gen)))
     }
 
@@ -445,6 +445,8 @@ pub struct CtxType<'a> {
     info: &'a ModuleInfo,
     cache_builder: Builder,
 
+    cached_signal_mem: Option<PointerValue>,
+
     cached_memories: HashMap<MemoryIndex, MemoryCache>,
     cached_tables: HashMap<TableIndex, TableCache>,
     cached_sigindices: HashMap<SigIndex, IntValue>,
@@ -470,6 +472,8 @@ impl<'a> CtxType<'a> {
             info,
             cache_builder,
 
+            cached_signal_mem: None,
+
             cached_memories: HashMap::new(),
             cached_tables: HashMap::new(),
             cached_sigindices: HashMap::new(),
@@ -484,6 +488,27 @@ impl<'a> CtxType<'a> {
         self.ctx_ptr_value.as_basic_value_enum()
     }
 
+    pub fn signal_mem(&mut self) -> PointerValue {
+        if let Some(x) = self.cached_signal_mem {
+            return x;
+        }
+
+        let (ctx_ptr_value, cache_builder) = (self.ctx_ptr_value, &self.cache_builder);
+
+        let ptr_ptr = unsafe {
+            cache_builder.build_struct_gep(
+                ctx_ptr_value,
+                offset_to_index(Ctx::offset_interrupt_signal_mem()),
+                "interrupt_signal_mem_ptr",
+            )
+        };
+        let ptr = cache_builder
+            .build_load(ptr_ptr, "interrupt_signal_mem")
+            .into_pointer_value();
+        self.cached_signal_mem = Some(ptr);
+        ptr
+    }
+
     pub fn memory(&mut self, index: MemoryIndex, intrinsics: &Intrinsics) -> MemoryCache {
         let (cached_memories, info, ctx_ptr_value, cache_builder) = (
             &mut self.cached_memories,
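The new signal_mem accessor above caches the pointer loaded from Ctx::offset_interrupt_signal_mem(), so the loop-header instrumentation only pays for one GEP and load per function. The IR that the Loop case emits with it (a volatile store of a zero byte) is roughly equivalent to the following Rust, shown only as a sketch of the semantics: the volatile qualifier keeps the store from being hoisted out of or removed from the loop, presumably so the host can revoke access to that page and interrupt long-running code at a known stackmap point.

    // Rough Rust equivalent of the IR emitted at each loop header (sketch only;
    // the real code builds LLVM instructions instead of calling a function).
    fn touch_interrupt_signal_mem(signal_mem: *mut u8) {
        unsafe {
            // Volatile: must not be elided or moved; every loop iteration performs it.
            std::ptr::write_volatile(signal_mem, 0);
        }
    }

    fn main() {
        let mut byte = 1u8;
        // With a real Ctx this would point at the interrupt signal page.
        touch_interrupt_signal_mem(&mut byte);
        assert_eq!(byte, 0);
    }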
@@ -22,6 +22,7 @@ pub struct StackmapEntry {
     pub value_semantics: Vec<ValueSemantic>,
     pub local_count: usize,
     pub stack_count: usize,
+    pub is_start: bool,
 }
 
 #[derive(Debug, Clone)]
@@ -30,7 +31,7 @@ pub enum ValueSemantic {
     WasmStack(usize),
 }
 
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
 pub enum StackmapEntryKind {
     FunctionHeader,
     Loop,
@@ -72,6 +73,7 @@ impl StackmapEntry {
         llvm_map: &StackMap,
         size_record: &StkSizeRecord,
         map_record: &StkMapRecord,
+        end: Option<(&StackmapEntry, &StkMapRecord)>,
         msm: &mut ModuleStateMap,
     ) {
         #[derive(Clone, Debug)]
@@ -80,12 +82,23 @@ impl StackmapEntry {
             Constant(u64),
         }
 
-        let fsm = msm
-            .local_functions
-            .entry(self.local_function_id)
-            .or_insert_with(|| {
-                FunctionStateMap::new(new_machine_state(), self.local_function_id, 0, vec![])
-            });
+        let func_base_addr = (size_record.function_address as usize)
+            .checked_sub(code_addr)
+            .unwrap();
+        let target_offset = func_base_addr + map_record.instruction_offset as usize;
+
+        if msm.local_functions.len() == self.local_function_id {
+            assert_eq!(self.kind, StackmapEntryKind::FunctionHeader);
+            msm.local_functions.insert(
+                target_offset,
+                FunctionStateMap::new(new_machine_state(), self.local_function_id, 0, vec![]),
+            );
+        } else if msm.local_functions.len() == self.local_function_id + 1 {
+        } else {
+            panic!("unordered local functions");
+        }
+
+        let (_, fsm) = msm.local_functions.iter_mut().last().unwrap();
 
         assert_eq!(self.value_semantics.len(), map_record.locations.len());
 
@@ -94,7 +107,8 @@ impl StackmapEntry {
         assert!(size_record.stack_size % 16 == 8);
 
         // Layout begins just below saved rbp. (push rbp; mov rbp, rsp)
-        let mut machine_stack_half_layout: Vec<MachineValue> = vec![MachineValue::Undefined; (size_record.stack_size - 8) as usize / 4];
+        let mut machine_stack_half_layout: Vec<MachineValue> =
+            vec![MachineValue::Undefined; (size_record.stack_size - 8) as usize / 4];
         let mut regs: Vec<(RegisterIndex, MachineValue)> = vec![];
         let mut stack_constants: HashMap<usize, u64> = HashMap::new();
 
@@ -158,7 +172,9 @@ impl StackmapEntry {
                     //eprintln!("XXX: {}", loc.offset_or_small_constant);
                 } else {
                     let stack_offset = ((-loc.offset_or_small_constant) / 4) as usize;
-                    assert!(stack_offset > 0 && stack_offset <= machine_stack_half_layout.len());
+                    assert!(
+                        stack_offset > 0 && stack_offset <= machine_stack_half_layout.len()
+                    );
                     machine_stack_half_layout[stack_offset - 1] = mv;
                 }
             }
@@ -182,7 +198,8 @@ impl StackmapEntry {
         assert_eq!(wasm_stack.len(), self.stack_count);
         assert_eq!(wasm_locals.len(), self.local_count);
 
-        let mut machine_stack_layout: Vec<MachineValue> = Vec::with_capacity(machine_stack_half_layout.len() / 2);
+        let mut machine_stack_layout: Vec<MachineValue> =
+            Vec::with_capacity(machine_stack_half_layout.len() / 2);
 
         for i in 0..machine_stack_half_layout.len() / 2 {
             let major = &machine_stack_half_layout[i * 2 + 1]; // mod 8 == 0
@@ -194,7 +211,10 @@ impl StackmapEntry {
             if only_major {
                 machine_stack_layout.push(major.clone());
             } else {
-                machine_stack_layout.push(MachineValue::TwoHalves(Box::new((major.clone(), minor.clone()))));
+                machine_stack_layout.push(MachineValue::TwoHalves(Box::new((
+                    major.clone(),
+                    minor.clone(),
+                ))));
             }
         }
 
@@ -219,10 +239,19 @@ impl StackmapEntry {
                 assert_eq!(fsm.locals, wasm_locals);
             }
         }
-        let target_offset = (size_record.function_address as usize)
-            .checked_sub(code_addr)
-            .unwrap()
-            + map_record.instruction_offset as usize;
+
+        let end_offset = {
+            if let Some(end) = end {
+                let (end_entry, end_record) = end;
+                assert_eq!(end_entry.is_start, false);
+                assert_eq!(self.opcode_offset, end_entry.opcode_offset);
+                let end_offset = func_base_addr + end_record.instruction_offset as usize;
+                assert!(end_offset >= target_offset);
+                end_offset
+            } else {
+                target_offset + 1
+            }
+        };
 
         match self.kind {
             StackmapEntryKind::Loop => {
@@ -231,6 +260,7 @@ impl StackmapEntry {
                 fsm.loop_offsets.insert(
                     target_offset,
                     OffsetInfo {
+                        end_offset,
                         diff_id,
                         activate_offset: target_offset,
                     },
@@ -242,6 +272,7 @@ impl StackmapEntry {
                 fsm.call_offsets.insert(
                     target_offset,
                     OffsetInfo {
+                        end_offset: end_offset + 1, // The return address is just after 'call' instruction. Offset by one here.
                        diff_id,
                        activate_offset: target_offset,
                    },
@@ -253,6 +284,7 @@ impl StackmapEntry {
                 fsm.trappable_offsets.insert(
                     target_offset,
                     OffsetInfo {
+                        end_offset,
                         diff_id,
                         activate_offset: target_offset,
                     },
@@ -260,6 +292,14 @@ impl StackmapEntry {
             }
             StackmapEntryKind::FunctionHeader => {
                 fsm.wasm_function_header_target_offset = Some(SuspendOffset::Loop(target_offset));
+                fsm.loop_offsets.insert(
+                    target_offset,
+                    OffsetInfo {
+                        end_offset,
+                        diff_id,
+                        activate_offset: target_offset,
+                    },
+                );
             }
         }
     }
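With end_offset now recorded for loop, call, trappable, and function-header points, an instruction-pointer lookup can match any address inside a point's machine-code range rather than only its exact start address. The lookup itself lives in wasmer_runtime_core and is not part of this diff; the following is a hypothetical sketch of a range-aware query over a map keyed by the activation offset, with a stand-in OffsetInfo type.

    use std::collections::BTreeMap;

    // Stand-in for the real OffsetInfo; only the fields used by the range check.
    struct OffsetInfo {
        activate_offset: usize,
        end_offset: usize,
    }

    // Return the record whose [activate_offset, end_offset) range contains ip_offset,
    // instead of requiring an exact key match.
    fn lookup(offsets: &BTreeMap<usize, OffsetInfo>, ip_offset: usize) -> Option<&OffsetInfo> {
        offsets
            .range(..=ip_offset) // candidates starting at or before ip_offset
            .next_back()         // the closest one
            .map(|(_, info)| info)
            .filter(|info| info.activate_offset <= ip_offset && ip_offset < info.end_offset)
    }

    fn main() {
        let mut offsets = BTreeMap::new();
        offsets.insert(100, OffsetInfo { activate_offset: 100, end_offset: 140 });
        assert!(lookup(&offsets, 123).is_some()); // anywhere inside the range now matches
        assert!(lookup(&offsets, 140).is_none()); // the end is exclusive in this sketch
    }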