Allow a range of instruction offsets to be used in instruction-pointer (IP) lookup.

This commit is contained in:
losfair 2019-07-30 22:25:15 +08:00
parent efb5277d1d
commit 0a7f95ee06
4 changed files with 191 additions and 30 deletions

View File

@ -12,13 +12,13 @@ use libc::{
};
use std::{
any::Any,
collections::BTreeMap,
ffi::{c_void, CString},
mem,
ops::Deref,
ptr::{self, NonNull},
slice, str,
sync::{Arc, Once},
collections::BTreeMap,
};
use wasmer_runtime_core::{
backend::{
@ -347,16 +347,31 @@ impl LLVMBackend {
map_records.insert(r.patchpoint_id as usize, r);
}
for (i, entry) in stackmaps.entries.iter().enumerate() {
if let Some(map_record) = map_records.get(&i) {
assert_eq!(i, map_record.patchpoint_id as usize);
let addr = local_func_id_to_addr[entry.local_function_id];
let size_record = *addr_to_size_record.get(&addr).expect("size_record not found");
entry.populate_msm(
for ((start_id, start_entry), (end_id, end_entry)) in stackmaps
.entries
.iter()
.enumerate()
.step_by(2)
.zip(stackmaps.entries.iter().enumerate().skip(1).step_by(2))
{
if let Some(map_record) = map_records.get(&start_id) {
assert_eq!(start_id, map_record.patchpoint_id as usize);
assert!(start_entry.is_start);
assert!(!end_entry.is_start);
let end_record = map_records.get(&end_id);
let addr = local_func_id_to_addr[start_entry.local_function_id];
let size_record = *addr_to_size_record
.get(&addr)
.expect("size_record not found");
start_entry.populate_msm(
code_ptr as usize,
&map,
size_record,
map_record,
end_record.map(|x| (end_entry, *x)),
&mut msm,
);
} else {

View File

@ -342,6 +342,38 @@ fn emit_stack_map(
stack_count: state.stack.len(),
opcode_offset,
value_semantics,
is_start: true,
});
}
/// Emits the closing half of an opcode's stack-map range.
///
/// Builds a call to the `llvm.experimental.stackmap` intrinsic whose ID is the
/// current length of `target.entries`, then pushes a matching `StackmapEntry`
/// with `is_start: false` so the entry's index equals the stackmap ID just
/// emitted. Paired with the start-entry emitter (which pushes `is_start: true`);
/// the backend later zips start/end entries two at a time to recover an
/// instruction-offset range per opcode.
///
/// Parameters:
/// - `intrinsics`: provides the `experimental_stackmap` intrinsic and int types.
/// - `builder`: LLVM IR builder positioned where the end marker is emitted.
/// - `local_function_id`: index of the function being compiled, stored in the entry.
/// - `target`: registry that accumulates stackmap entries for the module.
/// - `kind`: which kind of point this closes (Loop, Call, Trappable, FunctionHeader).
/// - `opcode_offset`: wasm opcode offset; must match the paired start entry.
fn finalize_opcode_stack_map(
intrinsics: &Intrinsics,
builder: &Builder,
local_function_id: usize,
target: &mut StackmapRegistry,
kind: StackmapEntryKind,
opcode_offset: usize,
) {
// The entry index about to be pushed doubles as the patchpoint/stackmap ID.
let stackmap_id = target.entries.len();
builder.build_call(
intrinsics.experimental_stackmap,
&[
intrinsics
.i64_ty
.const_int(stackmap_id as u64, false)
.as_basic_value_enum(),
// Second operand is the number of shadow bytes; none are needed here.
intrinsics.i32_ty.const_int(0, false).as_basic_value_enum(),
],
"opcode_stack_map_end",
);
// End entries carry no live values: counts and semantics are empty because
// the paired start entry already recorded the local/stack layout.
target.entries.push(StackmapEntry {
kind,
local_function_id,
local_count: 0,
stack_count: 0,
opcode_offset,
value_semantics: vec![],
is_start: false,
});
}
@ -448,7 +480,6 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
self.ctx = Some(ctx);
{
let mut state = &mut self.state;
let builder = self.builder.as_ref().unwrap();
@ -465,6 +496,14 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
&state,
::std::usize::MAX,
);
finalize_opcode_stack_map(
&intrinsics,
&builder,
self.index,
&mut *stackmaps,
StackmapEntryKind::FunctionHeader,
::std::usize::MAX,
);
}
Ok(())
@ -574,7 +613,19 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
&self.locals,
state,
offset,
)
);
let signal_mem = ctx.signal_mem();
let iv = builder
.build_store(signal_mem, context.i8_type().const_int(0 as u64, false));
iv.set_volatile(true);
finalize_opcode_stack_map(
intrinsics,
builder,
self.index,
&mut *stackmaps,
StackmapEntryKind::Loop,
offset,
);
}
state.push_loop(loop_body, loop_next, phis);
@ -850,10 +901,14 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
state,
offset,
);
builder.build_call(
intrinsics.trap,
&[],
"trap",
builder.build_call(intrinsics.trap, &[], "trap");
finalize_opcode_stack_map(
intrinsics,
builder,
self.index,
&mut *stackmaps,
StackmapEntryKind::Trappable,
offset + 1,
);
}
@ -1008,6 +1063,17 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
)
}
let call_site = builder.build_call(func_ptr, &params, &state.var_name());
if let Some(offset) = opcode_offset {
let mut stackmaps = self.stackmaps.borrow_mut();
finalize_opcode_stack_map(
intrinsics,
builder,
self.index,
&mut *stackmaps,
StackmapEntryKind::Call,
offset,
)
}
if let Some(basic_value) = call_site.try_as_basic_value().left() {
match func_sig.returns().len() {
@ -1186,6 +1252,17 @@ impl FunctionCodeGenerator<CodegenError> for LLVMFunctionCodeGenerator {
)
}
let call_site = builder.build_call(typed_func_ptr, &args, "indirect_call");
if let Some(offset) = opcode_offset {
let mut stackmaps = self.stackmaps.borrow_mut();
finalize_opcode_stack_map(
intrinsics,
builder,
self.index,
&mut *stackmaps,
StackmapEntryKind::Call,
offset,
)
}
match wasmer_fn_sig.returns() {
[] => {}
@ -2679,8 +2756,12 @@ impl ModuleCodeGenerator<LLVMFunctionCodeGenerator, LLVMBackend, CodegenError>
let stackmaps = self.stackmaps.borrow();
let (backend, cache_gen) =
LLVMBackend::new(self.module, self.intrinsics.take().unwrap(), &*stackmaps, module_info);
let (backend, cache_gen) = LLVMBackend::new(
self.module,
self.intrinsics.take().unwrap(),
&*stackmaps,
module_info,
);
Ok((backend, Box::new(cache_gen)))
}

View File

@ -445,6 +445,8 @@ pub struct CtxType<'a> {
info: &'a ModuleInfo,
cache_builder: Builder,
cached_signal_mem: Option<PointerValue>,
cached_memories: HashMap<MemoryIndex, MemoryCache>,
cached_tables: HashMap<TableIndex, TableCache>,
cached_sigindices: HashMap<SigIndex, IntValue>,
@ -470,6 +472,8 @@ impl<'a> CtxType<'a> {
info,
cache_builder,
cached_signal_mem: None,
cached_memories: HashMap::new(),
cached_tables: HashMap::new(),
cached_sigindices: HashMap::new(),
@ -484,6 +488,27 @@ impl<'a> CtxType<'a> {
self.ctx_ptr_value.as_basic_value_enum()
}
/// Returns a pointer to the runtime's interrupt-signal memory, loading it
/// from the `Ctx` struct on first use and caching it for subsequent calls.
///
/// The pointer is obtained by a struct GEP into `Ctx` at
/// `Ctx::offset_interrupt_signal_mem()` followed by a load; callers store to
/// it (e.g. a volatile zero-byte write at loop back-edges) so the runtime can
/// trap execution by unmapping that page.
pub fn signal_mem(&mut self) -> PointerValue {
// Fast path: the pointer was already loaded earlier for this context.
if let Some(x) = self.cached_signal_mem {
return x;
}
let (ctx_ptr_value, cache_builder) = (self.ctx_ptr_value, &self.cache_builder);
// SAFETY comment lives with build_struct_gep: the field index comes from
// the Ctx layout helper, so the GEP stays within the struct.
let ptr_ptr = unsafe {
cache_builder.build_struct_gep(
ctx_ptr_value,
offset_to_index(Ctx::offset_interrupt_signal_mem()),
"interrupt_signal_mem_ptr",
)
};
let ptr = cache_builder
.build_load(ptr_ptr, "interrupt_signal_mem")
.into_pointer_value();
// Cache so repeated opcodes don't re-emit the GEP + load.
self.cached_signal_mem = Some(ptr);
ptr
}
pub fn memory(&mut self, index: MemoryIndex, intrinsics: &Intrinsics) -> MemoryCache {
let (cached_memories, info, ctx_ptr_value, cache_builder) = (
&mut self.cached_memories,

View File

@ -22,6 +22,7 @@ pub struct StackmapEntry {
pub value_semantics: Vec<ValueSemantic>,
pub local_count: usize,
pub stack_count: usize,
pub is_start: bool,
}
#[derive(Debug, Clone)]
@ -30,7 +31,7 @@ pub enum ValueSemantic {
WasmStack(usize),
}
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum StackmapEntryKind {
FunctionHeader,
Loop,
@ -72,6 +73,7 @@ impl StackmapEntry {
llvm_map: &StackMap,
size_record: &StkSizeRecord,
map_record: &StkMapRecord,
end: Option<(&StackmapEntry, &StkMapRecord)>,
msm: &mut ModuleStateMap,
) {
#[derive(Clone, Debug)]
@ -80,12 +82,23 @@ impl StackmapEntry {
Constant(u64),
}
let fsm = msm
.local_functions
.entry(self.local_function_id)
.or_insert_with(|| {
FunctionStateMap::new(new_machine_state(), self.local_function_id, 0, vec![])
});
let func_base_addr = (size_record.function_address as usize)
.checked_sub(code_addr)
.unwrap();
let target_offset = func_base_addr + map_record.instruction_offset as usize;
if msm.local_functions.len() == self.local_function_id {
assert_eq!(self.kind, StackmapEntryKind::FunctionHeader);
msm.local_functions.insert(
target_offset,
FunctionStateMap::new(new_machine_state(), self.local_function_id, 0, vec![]),
);
} else if msm.local_functions.len() == self.local_function_id + 1 {
} else {
panic!("unordered local functions");
}
let (_, fsm) = msm.local_functions.iter_mut().last().unwrap();
assert_eq!(self.value_semantics.len(), map_record.locations.len());
@ -94,7 +107,8 @@ impl StackmapEntry {
assert!(size_record.stack_size % 16 == 8);
// Layout begins just below saved rbp. (push rbp; mov rbp, rsp)
let mut machine_stack_half_layout: Vec<MachineValue> = vec![MachineValue::Undefined; (size_record.stack_size - 8) as usize / 4];
let mut machine_stack_half_layout: Vec<MachineValue> =
vec![MachineValue::Undefined; (size_record.stack_size - 8) as usize / 4];
let mut regs: Vec<(RegisterIndex, MachineValue)> = vec![];
let mut stack_constants: HashMap<usize, u64> = HashMap::new();
@ -158,7 +172,9 @@ impl StackmapEntry {
//eprintln!("XXX: {}", loc.offset_or_small_constant);
} else {
let stack_offset = ((-loc.offset_or_small_constant) / 4) as usize;
assert!(stack_offset > 0 && stack_offset <= machine_stack_half_layout.len());
assert!(
stack_offset > 0 && stack_offset <= machine_stack_half_layout.len()
);
machine_stack_half_layout[stack_offset - 1] = mv;
}
}
@ -182,7 +198,8 @@ impl StackmapEntry {
assert_eq!(wasm_stack.len(), self.stack_count);
assert_eq!(wasm_locals.len(), self.local_count);
let mut machine_stack_layout: Vec<MachineValue> = Vec::with_capacity(machine_stack_half_layout.len() / 2);
let mut machine_stack_layout: Vec<MachineValue> =
Vec::with_capacity(machine_stack_half_layout.len() / 2);
for i in 0..machine_stack_half_layout.len() / 2 {
let major = &machine_stack_half_layout[i * 2 + 1]; // mod 8 == 0
@ -194,7 +211,10 @@ impl StackmapEntry {
if only_major {
machine_stack_layout.push(major.clone());
} else {
machine_stack_layout.push(MachineValue::TwoHalves(Box::new((major.clone(), minor.clone()))));
machine_stack_layout.push(MachineValue::TwoHalves(Box::new((
major.clone(),
minor.clone(),
))));
}
}
@ -219,10 +239,19 @@ impl StackmapEntry {
assert_eq!(fsm.locals, wasm_locals);
}
}
let target_offset = (size_record.function_address as usize)
.checked_sub(code_addr)
.unwrap()
+ map_record.instruction_offset as usize;
let end_offset = {
if let Some(end) = end {
let (end_entry, end_record) = end;
assert_eq!(end_entry.is_start, false);
assert_eq!(self.opcode_offset, end_entry.opcode_offset);
let end_offset = func_base_addr + end_record.instruction_offset as usize;
assert!(end_offset >= target_offset);
end_offset
} else {
target_offset + 1
}
};
match self.kind {
StackmapEntryKind::Loop => {
@ -231,6 +260,7 @@ impl StackmapEntry {
fsm.loop_offsets.insert(
target_offset,
OffsetInfo {
end_offset,
diff_id,
activate_offset: target_offset,
},
@ -242,6 +272,7 @@ impl StackmapEntry {
fsm.call_offsets.insert(
target_offset,
OffsetInfo {
end_offset: end_offset + 1, // The return address is just after 'call' instruction. Offset by one here.
diff_id,
activate_offset: target_offset,
},
@ -253,6 +284,7 @@ impl StackmapEntry {
fsm.trappable_offsets.insert(
target_offset,
OffsetInfo {
end_offset,
diff_id,
activate_offset: target_offset,
},
@ -260,6 +292,14 @@ impl StackmapEntry {
}
StackmapEntryKind::FunctionHeader => {
fsm.wasm_function_header_target_offset = Some(SuspendOffset::Loop(target_offset));
fsm.loop_offsets.insert(
target_offset,
OffsetInfo {
end_offset,
diff_id,
activate_offset: target_offset,
},
);
}
}
}