Update with list IR from walrus

This commit updates `wasm-bindgen` to the latest version of `walrus`,
which switches all of walrus's internal IR from a tree-based
representation to a list-based one. For `wasm-bindgen` itself this is a
mostly cosmetic change, but it requires substantial changes to the
threads and anyref transformation passes.

This commit also updates our CI configuration to actually run the full
anyref test suite on CI. This is done by downloading a nightly build of
Node.js, which is expected to remain available for a while until full
anyref support makes its way into Node.js releases.
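
For context, here is a minimal sketch (not part of the commit itself) of what
building a function looks like with the new list-based `FunctionBuilder`:
instructions are pushed onto an instruction sequence in program order instead
of being assembled as nested expression nodes. The `build_identity` helper and
its `Module` argument are hypothetical; the builder calls mirror the ones used
in the diffs below.

    // Sketch of the list-based builder API, assuming a walrus version with
    // the 0.11-style `FunctionBuilder` used throughout this commit.
    use walrus::{FunctionBuilder, FunctionId, Module, ValType};

    // Hypothetical helper: builds `(func (param i32) (result i32) local.get 0)`.
    fn build_identity(module: &mut Module) -> FunctionId {
        let mut builder =
            FunctionBuilder::new(&mut module.types, &[ValType::I32], &[ValType::I32]);
        let arg = module.locals.add(ValType::I32);
        // Instructions are appended to the body in the order they execute.
        builder.func_body().local_get(arg);
        builder.finish(vec![arg], &mut module.funcs)
    }
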
Alex Crichton 2019-08-12 10:49:00 -07:00
parent 1d0c333a2b
commit ad34fa29d8
8 changed files with 323 additions and 371 deletions

View File

@@ -92,3 +92,4 @@ wasm-bindgen = { path = '.' }
 wasm-bindgen-futures = { path = 'crates/futures' }
 js-sys = { path = 'crates/js-sys' }
 web-sys = { path = 'crates/web-sys' }
+wasm-webidl-bindings = { git = 'https://github.com/alexcrichton/wasm-webidl-bindings', branch = 'update-walrus' }

View File

@@ -18,8 +18,6 @@ jobs:
   displayName: "Crate test suite"
 - script: WASM_BINDGEN_NO_DEBUG=1 cargo test --target wasm32-unknown-unknown
   displayName: "Crate test suite (no debug)"
-- script: NODE_ARGS=/dev/null WASM_BINDGEN_ANYREF=1 cargo test --target wasm32-unknown-unknown --test wasm
-  displayName: "Anyref test suite builds"
 - script: cargo test --target wasm32-unknown-unknown --features serde-serialize
   displayName: "Crate test suite (with serde)"
 - script: cargo test --target wasm32-unknown-unknown --features enable-interning
@@ -30,6 +28,19 @@ jobs:
   displayName: "Futures test suite on native"
 - script: cargo test -p wasm-bindgen-futures --target wasm32-unknown-unknown
   displayName: "Futures test suite on wasm"
+- script: |
+    set -e
+    curl https://nodejs.org/download/nightly/v13.0.0-nightly2019081215b2d13310/node-v13.0.0-nightly2019081215b2d13310-linux-x64.tar.xz | tar xJf -
+    echo "##vso[task.prependpath]$PWD/node-v13.0.0-nightly2019081215b2d13310-linux-x64/bin"
+    echo "##vso[task.setvariable variable=NODE_ARGS]--experimental-wasm-anyref,--experimental-wasm-bulk_memory"
+    echo "##vso[task.setvariable variable=WASM_BINDGEN_ANYREF]1"
+  displayName: "Install a custom node.js and configure anyref"
+- script: cargo test --target wasm32-unknown-unknown --test wasm
+  displayName: "(anyref) Crate test suite"
+- script: WASM_BINDGEN_NO_DEBUG=1 cargo test --target wasm32-unknown-unknown --test wasm
+  displayName: "(anyref) Crate test suite (no debug)"
+- script: cargo test --target wasm32-unknown-unknown --features serde-serialize --test wasm
+  displayName: "(anyref) Crate test suite (with serde)"
 
 - job: test_wasm_bindgen_windows
   displayName: "Run wasm-bindgen crate tests (Windows)"

View File

@@ -18,9 +18,8 @@
 use failure::{bail, format_err, Error};
 use std::cmp;
 use std::collections::{BTreeMap, HashMap, HashSet};
-use std::mem;
 use walrus::ir::*;
-use walrus::{ExportId, ImportId};
+use walrus::{ExportId, ImportId, TypeId};
 use walrus::{FunctionId, GlobalId, InitExpr, Module, TableId, ValType};
 
 // must be kept in sync with src/lib.rs and ANYREF_HEAP_START
@@ -198,19 +197,20 @@ impl Context {
         //      (tee_local 1 (call $heap_alloc))
         //      (table.get (local.get 0)))
         //    (local.get 1))
-        let mut builder = walrus::FunctionBuilder::new();
+        let mut builder =
+            walrus::FunctionBuilder::new(&mut module.types, &[ValType::I32], &[ValType::I32]);
         let arg = module.locals.add(ValType::I32);
         let local = module.locals.add(ValType::I32);
 
-        let alloc = builder.call(heap_alloc, Box::new([]));
-        let tee = builder.local_tee(local, alloc);
-        let get_arg = builder.local_get(arg);
-        let get_table = builder.table_get(table, get_arg);
-        let set_table = builder.table_set(table, tee, get_table);
-        let get_local = builder.local_get(local);
-
-        let ty = module.types.add(&[ValType::I32], &[ValType::I32]);
-        let clone_ref = builder.finish(ty, vec![arg], vec![set_table, get_local], module);
+        let mut body = builder.func_body();
+        body.call(heap_alloc)
+            .local_tee(local)
+            .local_get(arg)
+            .table_get(table)
+            .table_set(table)
+            .local_get(local);
+        let clone_ref = builder.finish(vec![arg], &mut module.funcs);
         let name = "__wbindgen_object_clone_ref".to_string();
         module.funcs.get_mut(clone_ref).name = Some(name);
@@ -309,7 +309,7 @@ impl Transform<'_> {
                 None => continue,
             };
-            let shim = self.append_shim(
+            let (shim, anyref_ty) = self.append_shim(
                 f,
                 &import.name,
                 func,
@@ -318,6 +318,10 @@ impl Transform<'_> {
                 &mut module.locals,
             );
             self.import_map.insert(f, shim);
+            match &mut module.funcs.get_mut(f).kind {
+                walrus::FunctionKind::Import(f) => f.ty = anyref_ty,
+                _ => unreachable!(),
+            }
         }
     }
@@ -332,7 +336,7 @@ impl Transform<'_> {
                 Some(s) => s,
                 None => continue,
             };
-            let shim = self.append_shim(
+            let (shim, _anyref_ty) = self.append_shim(
                 f,
                 &export.name,
                 function,
@@ -362,7 +366,7 @@ impl Transform<'_> {
         // which places elements at the end.
         while let Some((idx, function)) = self.cx.elements.remove(&(kind.elements.len() as u32)) {
             let target = kind.elements[idx as usize].unwrap();
-            let shim = self.append_shim(
+            let (shim, _anyref_ty) = self.append_shim(
                 target,
                 &format!("closure{}", idx),
                 function,
@@ -390,15 +394,17 @@ impl Transform<'_> {
         types: &mut walrus::ModuleTypes,
         funcs: &mut walrus::ModuleFunctions,
         locals: &mut walrus::ModuleLocals,
-    ) -> FunctionId {
+    ) -> (FunctionId, TypeId) {
         let target = funcs.get_mut(shim_target);
-        let (is_export, ty) = match &mut target.kind {
-            walrus::FunctionKind::Import(f) => (false, &mut f.ty),
-            walrus::FunctionKind::Local(f) => (true, &mut f.ty),
+        let (is_export, ty) = match &target.kind {
+            walrus::FunctionKind::Import(f) => (false, f.ty),
+            walrus::FunctionKind::Local(f) => (true, f.ty()),
             _ => unreachable!(),
         };
-        let target_ty = types.get(*ty);
+        let target_ty = types.get(ty);
+        let target_ty_params = target_ty.params().to_vec();
+        let target_ty_results = target_ty.results().to_vec();
 
         // Learn about the various operations we're doing up front. Afterwards
         // we'll have a better idea bout what sort of code we're gonna be
@@ -452,14 +458,22 @@ impl Transform<'_> {
         // If we're an import, then our shim is what the Rust code calls, which
         // means it'll have the original signature. The existing import's
        // signature, however, is transformed to be an anyref signature.
-        let shim_ty = if is_export {
-            anyref_ty
-        } else {
-            mem::replace(ty, anyref_ty)
-        };
+        let shim_ty = if is_export { anyref_ty } else { ty };
 
-        let mut builder = walrus::FunctionBuilder::new();
-        let mut before = Vec::new();
+        let mut builder = walrus::FunctionBuilder::new(
+            types,
+            if is_export {
+                &param_tys
+            } else {
+                &target_ty_params
+            },
+            if is_export {
+                &new_ret
+            } else {
+                &target_ty_results
+            },
+        );
+        let mut body = builder.func_body();
         let params = types
             .get(shim_ty)
             .params()
@@ -476,60 +490,63 @@ impl Transform<'_> {
         // Update our stack pointer if there's any borrowed anyref objects.
         if anyref_stack > 0 {
-            let sp = builder.global_get(self.stack_pointer);
-            let size = builder.const_(Value::I32(anyref_stack));
-            let new_sp = builder.binop(BinaryOp::I32Sub, sp, size);
-            let tee = builder.local_tee(fp, new_sp);
-            before.push(builder.global_set(self.stack_pointer, tee));
+            body.global_get(self.stack_pointer)
+                .const_(Value::I32(anyref_stack))
+                .binop(BinaryOp::I32Sub)
+                .local_tee(fp)
+                .global_set(self.stack_pointer);
         }
 
         let mut next_stack_offset = 0;
-        let mut args = Vec::new();
         for (i, convert) in param_convert.iter().enumerate() {
-            let local = builder.local_get(params[i]);
-            args.push(match *convert {
-                Convert::None => local,
+            match *convert {
+                Convert::None => {
+                    body.local_get(params[i]);
+                }
                 Convert::Load { owned: true } => {
                     // load the anyref onto the stack, then afterwards
                     // deallocate our index, leaving the anyref on the stack.
-                    let get = builder.table_get(self.table, local);
-                    let free = builder.call(self.heap_dealloc, Box::new([local]));
-                    builder.with_side_effects(Vec::new(), get, vec![free])
+                    body.local_get(params[i])
+                        .table_get(self.table)
+                        .local_get(params[i])
+                        .call(self.heap_dealloc);
+                }
+                Convert::Load { owned: false } => {
+                    body.local_get(params[i]).table_get(self.table);
                 }
-                Convert::Load { owned: false } => builder.table_get(self.table, local),
                 Convert::Store { owned: true } => {
                     // Allocate space for the anyref, store it, and then leave
                     // the index of the allocated anyref on the stack.
-                    let alloc = builder.call(self.heap_alloc, Box::new([]));
-                    let tee = builder.local_tee(scratch_i32, alloc);
-                    let store = builder.table_set(self.table, tee, local);
-                    let get = builder.local_get(scratch_i32);
-                    builder.with_side_effects(vec![store], get, Vec::new())
+                    body.call(self.heap_alloc)
+                        .local_tee(scratch_i32)
+                        .local_get(params[i])
+                        .table_set(self.table)
+                        .local_get(scratch_i32);
                 }
                 Convert::Store { owned: false } => {
                     // Store an anyref at an offset from our function's stack
                     // pointer frame.
-                    let get_fp = builder.local_get(fp);
-                    let (index, idx_local) = if next_stack_offset == 0 {
-                        (get_fp, fp)
+                    body.local_get(fp);
+                    let idx_local = if next_stack_offset == 0 {
+                        fp
                     } else {
-                        let rhs = builder.i32_const(next_stack_offset);
-                        let add = builder.binop(BinaryOp::I32Add, get_fp, rhs);
-                        (builder.local_tee(scratch_i32, add), scratch_i32)
+                        body.i32_const(next_stack_offset)
+                            .binop(BinaryOp::I32Add)
+                            .local_tee(scratch_i32);
+                        scratch_i32
                     };
                     next_stack_offset += 1;
-                    let store = builder.table_set(self.table, index, local);
-                    let get = builder.local_get(idx_local);
-                    builder.with_side_effects(vec![store], get, Vec::new())
+                    body.local_get(params[i])
+                        .table_set(self.table)
+                        .local_get(idx_local);
                 }
-            });
+            }
         }
 
         // Now that we've converted all the arguments, call the original
         // function. This may be either an import or an export which we're
         // wrapping.
-        let mut result = builder.call(shim_target, args.into_boxed_slice());
-        let mut after = Vec::new();
+        body.call(shim_target);
 
         // If an anyref value is returned, then we need to be sure to apply
         // special treatment to convert it to an i32 as well. Note that only
@@ -540,20 +557,20 @@ impl Transform<'_> {
                 // We're an export so we have an i32 on the stack and need to
                 // convert it to an anyref, basically by doing the same as an
                 // owned load above: get the value then deallocate our slot.
-                let tee = builder.local_tee(scratch_i32, result);
-                result = builder.table_get(self.table, tee);
-                let get_local = builder.local_get(scratch_i32);
-                after.push(builder.call(self.heap_dealloc, Box::new([get_local])));
+                body.local_tee(scratch_i32)
+                    .table_get(self.table)
+                    .local_get(scratch_i32)
+                    .call(self.heap_dealloc);
             } else {
                 // Imports are the opposite, we have any anyref on the stack
                 // and convert it to an i32 by allocating space for it and
                 // storing it there.
-                before.push(builder.local_set(scratch_anyref, result));
-                let alloc = builder.call(self.heap_alloc, Box::new([]));
-                let tee = builder.local_tee(scratch_i32, alloc);
-                let get = builder.local_get(scratch_anyref);
-                before.push(builder.table_set(self.table, tee, get));
-                result = builder.local_get(scratch_i32);
+                body.local_set(scratch_anyref)
+                    .call(self.heap_alloc)
+                    .local_tee(scratch_i32)
+                    .local_get(scratch_anyref)
+                    .table_set(self.table)
+                    .local_get(scratch_i32);
             }
         }
@@ -567,32 +584,28 @@ impl Transform<'_> {
         // TODO: use `table.fill` once that's spec'd
         if anyref_stack > 0 {
             for i in 0..anyref_stack {
-                let get_fp = builder.local_get(fp);
-                let index = if i > 0 {
-                    let offset = builder.i32_const(i);
-                    builder.binop(BinaryOp::I32Add, get_fp, offset)
-                } else {
-                    get_fp
-                };
-                let null = builder.ref_null();
-                after.push(builder.table_set(self.table, index, null));
+                body.local_get(fp);
+                if i > 0 {
+                    body.i32_const(i).binop(BinaryOp::I32Add);
+                }
+                body.ref_null();
+                body.table_set(self.table);
             }
-            let get_fp = builder.local_get(fp);
-            let size = builder.i32_const(anyref_stack);
-            let new_sp = builder.binop(BinaryOp::I32Add, get_fp, size);
-            after.push(builder.global_set(self.stack_pointer, new_sp));
+            body.local_get(fp)
+                .i32_const(anyref_stack)
+                .binop(BinaryOp::I32Add)
+                .global_set(self.stack_pointer);
         }
 
         // Create the final expression node and then finish the function builder
         // with a fresh type we've been calculating so far. Give the function a
         // nice name for debugging and then we're good to go!
-        let expr = builder.with_side_effects(before, result, after);
-        let id = builder.finish_parts(shim_ty, params, vec![expr], types, funcs);
+        let id = builder.finish(params, funcs);
         let name = format!("{}_anyref_shim", name);
         funcs.get_mut(id).name = Some(name);
         self.shims.insert(id);
-        return id;
+        (id, anyref_ty)
     }
 
     fn rewrite_calls(&mut self, module: &mut Module) {
@@ -600,67 +613,59 @@ impl Transform<'_> {
             if self.shims.contains(&id) {
                 continue;
             }
-            let mut entry = func.entry_block();
-            Rewrite {
-                func,
-                xform: self,
-                replace: None,
-            }
-            .visit_block_id_mut(&mut entry);
+            let entry = func.entry_block();
+            dfs_pre_order_mut(&mut Rewrite { xform: self }, func, entry);
         }
 
         struct Rewrite<'a, 'b> {
-            func: &'a mut walrus::LocalFunction,
             xform: &'a Transform<'b>,
-            replace: Option<ExprId>,
         }
 
         impl VisitorMut for Rewrite<'_, '_> {
-            fn local_function_mut(&mut self) -> &mut walrus::LocalFunction {
-                self.func
-            }
-
-            fn visit_expr_id_mut(&mut self, expr: &mut ExprId) {
-                expr.visit_mut(self);
-                if let Some(id) = self.replace.take() {
-                    *expr = id;
-                }
-            }
-
-            fn visit_call_mut(&mut self, e: &mut Call) {
-                e.visit_mut(self);
-                let intrinsic = match self.xform.intrinsic_map.get(&e.func) {
+            fn start_instr_seq_mut(&mut self, seq: &mut InstrSeq) {
+                for i in (0..seq.instrs.len()).rev() {
+                    let call = match &mut seq.instrs[i] {
+                        Instr::Call(call) => call,
+                        _ => continue,
+                    };
+                    let intrinsic = match self.xform.intrinsic_map.get(&call.func) {
                         Some(f) => f,
                         None => {
                             // If this wasn't a call of an intrinsic, but it was a
                             // call of one of our old import functions then we
                             // switch the functions we're calling here.
-                            if let Some(f) = self.xform.import_map.get(&e.func) {
-                                e.func = *f;
+                            if let Some(f) = self.xform.import_map.get(&call.func) {
+                                call.func = *f;
                             }
-                            return;
+                            continue;
                         }
                     };
-                    let builder = self.func.builder_mut();
                     match intrinsic {
                         Intrinsic::TableGrow => {
-                            assert_eq!(e.args.len(), 1);
-                            let delta = e.args[0];
-                            let null = builder.ref_null();
-                            let grow = builder.table_grow(self.xform.table, delta, null);
-                            self.replace = Some(grow);
+                            // Switch this to a `table.grow` instruction...
+                            seq.instrs[i] = TableGrow {
+                                table: self.xform.table,
+                            }
+                            .into();
+                            // ... and then insert a `ref.null` before the
+                            // preceding instruction as the value to grow the
+                            // table with.
+                            seq.instrs.insert(i - 1, RefNull {}.into());
                         }
                         Intrinsic::TableSetNull => {
-                            assert_eq!(e.args.len(), 1);
-                            let index = e.args[0];
-                            let null = builder.ref_null();
-                            let set = builder.table_set(self.xform.table, index, null);
-                            self.replace = Some(set);
+                            // Switch this to a `table.set` instruction...
+                            seq.instrs[i] = TableSet {
+                                table: self.xform.table,
+                            }
+                            .into();
+                            // ... and then insert a `ref.null` as the
+                            // preceding instruction
+                            seq.instrs.insert(i, RefNull {}.into());
+                        }
+                        Intrinsic::DropRef => call.func = self.xform.heap_dealloc,
+                        Intrinsic::CloneRef => call.func = self.xform.clone_ref,
                         }
-                        Intrinsic::DropRef => e.func = self.xform.heap_dealloc,
-                        Intrinsic::CloneRef => e.func = self.xform.clone_ref,
                     }
                 }
             }
        }

View File

@@ -18,9 +18,9 @@ log = "0.4"
 rustc-demangle = "0.1.13"
 serde_json = "1.0"
 tempfile = "3.0"
-walrus = "0.10.0"
+walrus = "0.11.0"
 wasm-bindgen-anyref-xform = { path = '../anyref-xform', version = '=0.2.48' }
 wasm-bindgen-shared = { path = "../shared", version = '=0.2.48' }
 wasm-bindgen-threads-xform = { path = '../threads-xform', version = '=0.2.48' }
 wasm-bindgen-wasm-interpreter = { path = "../wasm-interpreter", version = '=0.2.48' }
-wasm-webidl-bindings = "0.3.0"
+wasm-webidl-bindings = "0.4.0"

View File

@@ -14,9 +14,8 @@ use crate::descriptor::{Closure, Descriptor};
 use failure::Error;
 use std::borrow::Cow;
 use std::collections::{HashMap, HashSet};
-use std::mem;
 use walrus::ImportId;
-use walrus::{CustomSection, FunctionId, LocalFunction, Module, TypedCustomSectionId};
+use walrus::{CustomSection, FunctionId, Module, TypedCustomSectionId};
 use wasm_bindgen_wasm_interpreter::Interpreter;
 
 #[derive(Default, Debug)]
@@ -112,19 +111,16 @@ impl WasmBindgenDescriptorsSection {
         let mut element_removal_list = HashSet::new();
         let mut func_to_descriptor = HashMap::new();
         for (id, local) in module.funcs.iter_local() {
-            let entry = local.entry_block();
             let mut find = FindDescribeClosure {
-                func: local,
                 wbindgen_describe_closure,
-                cur: entry.into(),
-                call: None,
+                found: false,
             };
-            find.visit_block_id(&entry);
-            if let Some(call) = find.call {
+            dfs_in_order(&mut find, local, local.entry_block());
+            if find.found {
                 let descriptor = interpreter
                     .interpret_closure_descriptor(id, module, &mut element_removal_list)
                     .unwrap();
-                func_to_descriptor.insert(id, (call, Descriptor::decode(descriptor)));
+                func_to_descriptor.insert(id, Descriptor::decode(descriptor));
             }
         }
@@ -150,7 +146,7 @@ impl WasmBindgenDescriptorsSection {
         // freshly manufactured import. Save off the type of this import in
         // ourselves, and then we're good to go.
         let ty = module.funcs.get(wbindgen_describe_closure).ty();
-        for (func, (call_instr, descriptor)) in func_to_descriptor {
+        for (func, descriptor) in func_to_descriptor {
             let import_name = format!("__wbindgen_closure_wrapper{}", func.index());
             let (id, import_id) =
                 module.add_import_func("__wbindgen_placeholder__", &import_name, ty);
@@ -160,37 +156,42 @@ impl WasmBindgenDescriptorsSection {
                 walrus::FunctionKind::Local(l) => l,
                 _ => unreachable!(),
             };
-            let call = local.get_mut(call_instr).unwrap_call_mut();
-            assert_eq!(call.func, wbindgen_describe_closure);
-            call.func = id;
+            let entry = local.entry_block();
+            dfs_pre_order_mut(
+                &mut UpdateDescribeClosure {
+                    wbindgen_describe_closure,
+                    replacement: id,
+                },
+                local,
+                entry,
+            );
             self.closure_imports
                 .insert(import_id, descriptor.unwrap_closure());
         }
         return Ok(());
 
-        struct FindDescribeClosure<'a> {
-            func: &'a LocalFunction,
+        struct FindDescribeClosure {
             wbindgen_describe_closure: FunctionId,
-            cur: ExprId,
-            call: Option<ExprId>,
+            found: bool,
        }
 
-        impl<'a> Visitor<'a> for FindDescribeClosure<'a> {
-            fn local_function(&self) -> &'a LocalFunction {
-                self.func
-            }
-
-            fn visit_expr_id(&mut self, id: &ExprId) {
-                let prev = mem::replace(&mut self.cur, *id);
-                id.visit(self);
-                self.cur = prev;
-            }
-
+        impl<'a> Visitor<'a> for FindDescribeClosure {
             fn visit_call(&mut self, call: &Call) {
-                call.visit(self);
                 if call.func == self.wbindgen_describe_closure {
-                    assert!(self.call.is_none());
-                    self.call = Some(self.cur);
+                    self.found = true;
+                }
+            }
+        }
+
+        struct UpdateDescribeClosure {
+            wbindgen_describe_closure: FunctionId,
+            replacement: FunctionId,
+        }
+
+        impl<'a> VisitorMut for UpdateDescribeClosure {
+            fn visit_call_mut(&mut self, call: &mut Call) {
+                if call.func == self.wbindgen_describe_closure {
+                    call.func = self.replacement;
                 }
             }
         }

View File

@@ -644,10 +644,9 @@ impl<'a> Context<'a> {
         self.module.start = Some(match self.module.start {
             Some(prev_start) => {
-                let mut builder = walrus::FunctionBuilder::new();
-                let call_init = builder.call(import, Box::new([]));
-                let call_prev = builder.call(prev_start, Box::new([]));
-                builder.finish(ty, Vec::new(), vec![call_init, call_prev], self.module)
+                let mut builder = walrus::FunctionBuilder::new(&mut self.module.types, &[], &[]);
+                builder.func_body().call(import).call(prev_start);
+                builder.finish(Vec::new(), &mut self.module.funcs)
             }
             None => import,
         });
@@ -827,11 +826,9 @@ impl<'a> Context<'a> {
         // because the start function currently only shows up when it's injected
         // through thread/anyref transforms. These injected start functions need
         // to happen before user code, so we always schedule them first.
-        let mut builder = walrus::FunctionBuilder::new();
-        let call1 = builder.call(prev_start, Box::new([]));
-        let call2 = builder.call(id, Box::new([]));
-        let ty = self.module.funcs.get(id).ty();
-        let new_start = builder.finish(ty, Vec::new(), vec![call1, call2], self.module);
+        let mut builder = walrus::FunctionBuilder::new(&mut self.module.types, &[], &[]);
+        builder.func_body().call(prev_start).call(id);
+        let new_start = builder.finish(Vec::new(), &mut self.module.funcs);
         self.module.start = Some(new_start);
         Ok(())
     }

View File

@@ -5,7 +5,7 @@ use std::mem;
 use failure::{bail, format_err, Error};
 use walrus::ir::Value;
-use walrus::{DataId, FunctionId, InitExpr, LocalFunction, ValType};
+use walrus::{DataId, FunctionId, InitExpr, ValType};
 use walrus::{ExportItem, GlobalId, GlobalKind, ImportKind, MemoryId, Module};
 
 const PAGE_SIZE: u32 = 1 << 16;
@@ -365,13 +365,13 @@ fn inject_start(
     use walrus::ir::*;
 
     assert!(stack_size % PAGE_SIZE == 0);
-    let mut builder = walrus::FunctionBuilder::new();
-    let mut exprs = Vec::new();
+    let mut builder = walrus::FunctionBuilder::new(&mut module.types, &[], &[]);
     let local = module.locals.add(ValType::I32);
-    let addr = builder.i32_const(addr as i32);
-    let one = builder.i32_const(1);
-    let thread_id = builder.atomic_rmw(
+    let mut body = builder.func_body();
+    body.i32_const(addr as i32)
+        .i32_const(1)
+        .atomic_rmw(
             memory,
             AtomicOp::Add,
             AtomicWidth::I32,
@@ -379,92 +379,72 @@ fn inject_start(
                 align: 4,
                 offset: 0,
             },
-        addr,
-        one,
-    );
-    let thread_id = builder.local_tee(local, thread_id);
-    let global_set = builder.global_set(globals.thread_id, thread_id);
-    exprs.push(global_set);
+        )
+        .local_tee(local)
+        .global_set(globals.thread_id);
 
     // Perform an if/else based on whether we're the first thread or not. Our
     // thread ID will be zero if we're the first thread, otherwise it'll be
     // nonzero (assuming we don't overflow...)
-    //
-    let thread_id_is_nonzero = builder.local_get(local);
+    body.local_get(local);
+    body.if_else(
+        Box::new([]),
+        Box::new([]),
         // If our thread id is nonzero then we're the second or greater thread, so
         // we give ourselves a stack via memory.grow and we update our stack
         // pointer as the default stack pointer is surely wrong for us.
-    let mut block = builder.if_else_block(Box::new([]), Box::new([]));
+        |body| {
            if let Some(stack_pointer) = stack_pointer {
                // local0 = grow_memory(stack_size);
-                let grow_amount = block.i32_const((stack_size / PAGE_SIZE) as i32);
-                let memory_growth = block.memory_grow(memory, grow_amount);
-                let set_local = block.local_set(local, memory_growth);
-                block.expr(set_local);
+                body.i32_const((stack_size / PAGE_SIZE) as i32)
+                    .memory_grow(memory)
+                    .local_set(local);
 
                // if local0 == -1 then trap
-                let if_negative_trap = {
-                    let mut block = block.block(Box::new([]), Box::new([]));
-
-                    let lhs = block.local_get(local);
-                    let rhs = block.i32_const(-1);
-                    let condition = block.binop(BinaryOp::I32Ne, lhs, rhs);
-                    let id = block.id();
-                    let br_if = block.br_if(condition, id, Box::new([]));
-                    block.expr(br_if);
-
-                    let unreachable = block.unreachable();
-                    block.expr(unreachable);
-                    id
-                };
-                block.expr(if_negative_trap.into());
+                body.block(Box::new([]), Box::new([]), |body| {
+                    let target = body.id();
+                    body.local_get(local)
+                        .i32_const(-1)
+                        .binop(BinaryOp::I32Ne)
+                        .br_if(target)
+                        .unreachable();
+                });
 
                // stack_pointer = local0 + stack_size
-                let get_local = block.local_get(local);
-                let page_size = block.i32_const(PAGE_SIZE as i32);
-                let sp_base = block.binop(BinaryOp::I32Mul, get_local, page_size);
-                let stack_size = block.i32_const(stack_size as i32);
-                let sp = block.binop(BinaryOp::I32Add, sp_base, stack_size);
-                let set_stack_pointer = block.global_set(stack_pointer, sp);
-                block.expr(set_stack_pointer);
+                body.local_get(local)
+                    .i32_const(PAGE_SIZE as i32)
+                    .binop(BinaryOp::I32Mul)
+                    .i32_const(stack_size as i32)
+                    .binop(BinaryOp::I32Add)
+                    .global_set(stack_pointer);
            }
-    let if_nonzero_block = block.id();
-    drop(block);
+        },
 
        // If the thread ID is zero then we can skip the update of the stack
        // pointer as we know our stack pointer is valid. We need to initialize
        // memory, however, so do that here.
-    let if_zero_block = {
-        let mut block = builder.if_else_block(Box::new([]), Box::new([]));
+        |body| {
            match &memory_init {
                InitMemory::Segments(segments) => {
                    for segment in segments {
-                        let zero = block.i32_const(0);
-                        let offset = match segment.offset {
-                            InitExpr::Global(id) => block.global_get(id),
-                            InitExpr::Value(v) => block.const_(v),
+                        // let zero = block.i32_const(0);
+                        match segment.offset {
+                            InitExpr::Global(id) => body.global_get(id),
+                            InitExpr::Value(v) => body.const_(v),
                        };
-                        let len = block.i32_const(segment.len as i32);
-                        let init = block.memory_init(memory, segment.id, offset, zero, len);
-                        block.expr(init);
-                        let drop = block.data_drop(segment.id);
-                        block.expr(drop);
+                        body.i32_const(0)
+                            .i32_const(segment.len as i32)
+                            .memory_init(memory, segment.id)
+                            .data_drop(segment.id);
                    }
                }
                InitMemory::Call {
                    wasm_init_memory, ..
                } => {
-                    let call = block.call(*wasm_init_memory, Box::new([]));
-                    block.expr(call);
+                    body.call(*wasm_init_memory);
                }
            }
-        block.id()
-    };
-    let block = builder.if_else(thread_id_is_nonzero, if_nonzero_block, if_zero_block);
-    exprs.push(block);
+        },
+    );
 
     // If we have these globals then we're using the new thread local system
     // implemented in LLVM, which means that `__wasm_init_tls` needs to be
@@ -477,21 +457,19 @@ fn inject_start(
     } = memory_init
     {
         let malloc = find_wbindgen_malloc(module)?;
-        let size = builder.i32_const(tls_size as i32);
-        let ptr = builder.call(malloc, Box::new([size]));
-        let block = builder.call(wasm_init_tls, Box::new([ptr]));
-        exprs.push(block);
+        body.i32_const(tls_size as i32)
+            .call(malloc)
+            .call(wasm_init_tls);
     }
 
     // If a start function previously existed we're done with our own
     // initialization so delegate to them now.
     if let Some(id) = module.start.take() {
-        exprs.push(builder.call(id, Box::new([])));
+        body.call(id);
     }
 
     // Finish off our newly generated function.
-    let ty = module.types.add(&[], &[]);
-    let id = builder.finish(ty, Vec::new(), exprs, module);
+    let id = builder.finish(Vec::new(), &mut module.funcs);
 
     // ... and finally flag it as the new start function
     module.start = Some(id);
@@ -559,54 +537,39 @@ fn implement_thread_intrinsics(module: &mut Module, globals: &Globals) -> Result
     struct Visitor<'a> {
         map: &'a HashMap<FunctionId, Intrinsic>,
         globals: &'a Globals,
-        func: &'a mut LocalFunction,
     }
 
     module.funcs.iter_local_mut().for_each(|(_id, func)| {
-        let mut entry = func.entry_block();
-        Visitor {
-            map: &map,
-            globals,
-            func,
-        }
-        .visit_block_id_mut(&mut entry);
+        let entry = func.entry_block();
+        dfs_pre_order_mut(&mut Visitor { map: &map, globals }, func, entry);
     });
 
     impl VisitorMut for Visitor<'_> {
-        fn local_function_mut(&mut self) -> &mut LocalFunction {
-            self.func
-        }
-
-        fn visit_expr_mut(&mut self, expr: &mut Expr) {
-            let call = match expr {
-                Expr::Call(e) => e,
-                other => return other.visit_mut(self),
+        fn visit_instr_mut(&mut self, instr: &mut Instr) {
+            let call = match instr {
+                Instr::Call(e) => e,
+                _ => return,
            };
            match self.map.get(&call.func) {
                Some(Intrinsic::GetThreadId) => {
-                    assert!(call.args.is_empty());
-                    *expr = GlobalGet {
+                    *instr = GlobalGet {
                        global: self.globals.thread_id,
                    }
                    .into();
                }
                Some(Intrinsic::GetTcb) => {
-                    assert!(call.args.is_empty());
-                    *expr = GlobalGet {
+                    *instr = GlobalGet {
                        global: self.globals.thread_tcb,
                    }
                    .into();
                }
                Some(Intrinsic::SetTcb) => {
-                    assert_eq!(call.args.len(), 1);
-                    call.args[0].visit_mut(self);
-                    *expr = GlobalSet {
+                    *instr = GlobalSet {
                        global: self.globals.thread_tcb,
-                        value: call.args[0],
                    }
                    .into();
                }
-                None => call.visit_mut(self),
+                None => {}
            }
        }
    }

View File

@@ -19,8 +19,8 @@
 #![deny(missing_docs)]
 
 use std::collections::{BTreeMap, HashMap, HashSet};
-use walrus::ir::ExprId;
-use walrus::{FunctionId, LocalFunction, LocalId, Module, TableId};
+use walrus::ir::Instr;
+use walrus::{FunctionId, LocalId, Module, TableId};
 
 /// A ready-to-go interpreter of a wasm module.
 ///
@@ -46,6 +46,7 @@ pub struct Interpreter {
     // used in a limited capacity.
     sp: i32,
     mem: Vec<i32>,
+    scratch: Vec<i32>,
 
     // The descriptor which we're assembling, a list of `u32` entries. This is
     // very specific to wasm-bindgen and is the purpose for the existence of
@@ -235,7 +236,6 @@ impl Interpreter {
         let mut frame = Frame {
             module,
-            local,
             interp: self,
             locals: BTreeMap::new(),
             done: false,
@@ -246,121 +246,96 @@ impl Interpreter {
             frame.locals.insert(*arg, *val);
         }
 
-        if block.exprs.len() > 0 {
-            for expr in block.exprs[..block.exprs.len() - 1].iter() {
-                let ret = frame.eval(*expr);
-                if frame.done {
-                    return ret;
-                }
-            }
-        }
-        block.exprs.last().and_then(|e| frame.eval(*e))
+        for instr in block.instrs.iter() {
+            frame.eval(instr);
+            if frame.done {
+                break;
+            }
+        }
+        self.scratch.last().cloned()
     }
 }
 
 struct Frame<'a> {
     module: &'a Module,
-    local: &'a LocalFunction,
     interp: &'a mut Interpreter,
     locals: BTreeMap<LocalId, i32>,
     done: bool,
 }
 
 impl Frame<'_> {
-    fn local(&self, id: LocalId) -> i32 {
-        self.locals.get(&id).cloned().unwrap_or(0)
-    }
-
-    fn eval(&mut self, expr: ExprId) -> Option<i32> {
+    fn eval(&mut self, instr: &Instr) {
         use walrus::ir::*;
-        match self.local.get(expr) {
-            Expr::Const(c) => match c.value {
-                Value::I32(n) => Some(n),
+        let stack = &mut self.interp.scratch;
+
+        match instr {
+            Instr::Const(c) => match c.value {
+                Value::I32(n) => stack.push(n),
                 _ => panic!("non-i32 constant"),
             },
-            Expr::LocalGet(e) => Some(self.local(e.local)),
-            Expr::LocalSet(e) => {
-                let val = self.eval(e.value).expect("must eval to i32");
+            Instr::LocalGet(e) => stack.push(self.locals.get(&e.local).cloned().unwrap_or(0)),
+            Instr::LocalSet(e) => {
+                let val = stack.pop().unwrap();
                 self.locals.insert(e.local, val);
-                None
             }
 
             // Blindly assume all globals are the stack pointer
-            Expr::GlobalGet(_) => Some(self.interp.sp),
-            Expr::GlobalSet(e) => {
-                let val = self.eval(e.value).expect("must eval to i32");
+            Instr::GlobalGet(_) => stack.push(self.interp.sp),
+            Instr::GlobalSet(_) => {
+                let val = stack.pop().unwrap();
                 self.interp.sp = val;
-                None
             }
 
             // Support simple arithmetic, mainly for the stack pointer
             // manipulation
-            Expr::Binop(e) => {
-                let lhs = self.eval(e.lhs).expect("must eval to i32");
-                let rhs = self.eval(e.rhs).expect("must eval to i32");
-                match e.op {
-                    BinaryOp::I32Sub => Some(lhs - rhs),
-                    BinaryOp::I32Add => Some(lhs + rhs),
+            Instr::Binop(e) => {
+                let rhs = stack.pop().unwrap();
+                let lhs = stack.pop().unwrap();
+                stack.push(match e.op {
+                    BinaryOp::I32Sub => lhs - rhs,
+                    BinaryOp::I32Add => lhs + rhs,
                     op => panic!("invalid binary op {:?}", op),
-                }
+                });
             }
 
             // Support small loads/stores to the stack. These show up in debug
             // mode where there's some traffic on the linear stack even when in
             // theory there doesn't need to be.
-            Expr::Load(e) => {
-                let address = self.eval(e.address).expect("must eval to i32");
+            Instr::Load(e) => {
+                let address = stack.pop().unwrap();
                 let address = address as u32 + e.arg.offset;
                 assert!(address % 4 == 0);
-                Some(self.interp.mem[address as usize / 4])
+                stack.push(self.interp.mem[address as usize / 4])
            }
-            Expr::Store(e) => {
-                let address = self.eval(e.address).expect("must eval to i32");
-                let value = self.eval(e.value).expect("must eval to i32");
+            Instr::Store(e) => {
+                let value = stack.pop().unwrap();
+                let address = stack.pop().unwrap();
                 let address = address as u32 + e.arg.offset;
                 assert!(address % 4 == 0);
                 self.interp.mem[address as usize / 4] = value;
-                None
            }
 
-            Expr::Return(e) => {
+            Instr::Return(_) => {
                 log::debug!("return");
                 self.done = true;
-                assert!(e.values.len() <= 1);
-                e.values.get(0).and_then(|id| self.eval(*id))
            }
-            Expr::Drop(e) => {
+            Instr::Drop(_) => {
                 log::debug!("drop");
-                self.eval(e.expr);
-                None
+                stack.pop().unwrap();
            }
-            Expr::WithSideEffects(e) => {
-                log::debug!("side effects");
-                for x in e.before.iter() {
-                    self.eval(*x);
-                }
-                let ret = self.eval(e.value);
-                for x in e.after.iter() {
-                    self.eval(*x);
-                }
-                return ret;
-            }
 
-            Expr::Call(e) => {
+            Instr::Call(e) => {
                 // If this function is calling the `__wbindgen_describe`
                 // function, which we've precomputed the id for, then
                 // it's telling us about the next `u32` element in the
                 // descriptor to return. We "call" the imported function
                 // here by directly inlining it.
                 if Some(e.func) == self.interp.describe_id {
-                    assert_eq!(e.args.len(), 1);
-                    let val = self.eval(e.args[0]).expect("must eval to i32");
+                    let val = stack.pop().unwrap();
                     log::debug!("__wbindgen_describe({})", val);
                     self.interp.descriptor.push(val as u32);
-                    None
 
                 // If this function is calling the `__wbindgen_describe_closure`
                 // function then it's similar to the above, except there's a
@@ -368,21 +343,20 @@ impl Frame<'_> {
                 // previous arguments because they shouldn't have any side
                 // effects we're interested in.
                 } else if Some(e.func) == self.interp.describe_closure_id {
-                    assert_eq!(e.args.len(), 3);
-                    let val = self.eval(e.args[2]).expect("must eval to i32");
+                    let val = stack.pop().unwrap();
+                    drop(stack.pop());
+                    drop(stack.pop());
                     log::debug!("__wbindgen_describe_closure({})", val);
                     self.interp.descriptor_table_idx = Some(val as u32);
-                    Some(0)
+                    stack.push(0)
 
                 // ... otherwise this is a normal call so we recurse.
                 } else {
-                    let args = e
-                        .args
-                        .iter()
-                        .map(|e| self.eval(*e).expect("must eval to i32"))
+                    let ty = self.module.types.get(self.module.funcs.get(e.func).ty());
+                    let args = (0..ty.params().len())
+                        .map(|_| stack.pop().unwrap())
                         .collect::<Vec<_>>();
                     self.interp.call(e.func, self.module, &args);
-                    None
                 }
             }