From 3f5d7b1e0ecf7421f077bcf2228dd6674a9c980d Mon Sep 17 00:00:00 2001 From: Joel Dice Date: Fri, 24 Jan 2025 09:59:13 -0700 Subject: [PATCH] add component-model-async/fused.wast test This is another piece of #9582 which I'm splitting out to make review easier. This test exercises fused adapter generation for various flavors of intercomponent async->async, async->sync, and sync->async calls. The remaining changes fill in some TODOs to make the test pass. Signed-off-by: Joel Dice --- crates/cranelift/src/compiler/component.rs | 200 +++++- crates/environ/src/component.rs | 4 + crates/environ/src/fact.rs | 163 ++++- crates/environ/src/fact/signature.rs | 99 ++- crates/environ/src/fact/trampoline.rs | 629 +++++++++++++++--- crates/wasmtime/src/runtime/store.rs | 60 ++ crates/wasmtime/src/runtime/vm.rs | 33 + .../src/runtime/vm/component/libcalls.rs | 67 +- .../component-model-async/fused.wast | 251 +++++++ 9 files changed, 1403 insertions(+), 103 deletions(-) create mode 100644 tests/misc_testsuite/component-model-async/fused.wast diff --git a/crates/cranelift/src/compiler/component.rs b/crates/cranelift/src/compiler/component.rs index 796b029280a2..784f4284ba64 100644 --- a/crates/cranelift/src/compiler/component.rs +++ b/crates/cranelift/src/compiler/component.rs @@ -3,7 +3,7 @@ use crate::{compiler::Compiler, TRAP_ALWAYS, TRAP_CANNOT_ENTER, TRAP_INTERNAL_ASSERT}; use anyhow::Result; use cranelift_codegen::ir::condcodes::IntCC; -use cranelift_codegen::ir::{self, InstBuilder, MemFlags}; +use cranelift_codegen::ir::{self, InstBuilder, MemFlags, Value}; use cranelift_codegen::isa::{CallConv, TargetIsa}; use cranelift_frontend::FunctionBuilder; use std::any::Any; @@ -98,7 +98,7 @@ impl<'a> TrampolineCompiler<'a> { _ = instance; todo!() } - Trampoline::TaskReturn => todo!(), + Trampoline::TaskReturn => self.translate_task_return_call(), Trampoline::TaskWait { instance, async_, @@ -196,12 +196,12 @@ impl<'a> TrampolineCompiler<'a> { Trampoline::ResourceDrop(ty) => self.translate_resource_drop(*ty), Trampoline::ResourceTransferOwn => { self.translate_resource_libcall(host::resource_transfer_own, |me, rets| { - rets[0] = me.raise_if_resource_trapped(rets[0]); + rets[0] = me.raise_if_i32_trapped(rets[0]); }) } Trampoline::ResourceTransferBorrow => { self.translate_resource_libcall(host::resource_transfer_borrow, |me, rets| { - rets[0] = me.raise_if_resource_trapped(rets[0]); + rets[0] = me.raise_if_i32_trapped(rets[0]); }) } Trampoline::ResourceEnterCall => { @@ -212,14 +212,17 @@ impl<'a> TrampolineCompiler<'a> { me.raise_if_host_trapped(rets.pop().unwrap()); }) } - Trampoline::AsyncEnterCall => todo!(), + Trampoline::AsyncEnterCall => { + self.translate_async_enter_or_exit(host::async_enter, None, ir::types::I8) + } Trampoline::AsyncExitCall { callback, post_return, - } => { - _ = (callback, post_return); - todo!() - } + } => self.translate_async_enter_or_exit( + host::async_exit, + Some((*callback, *post_return)), + ir::types::I64, + ), Trampoline::FutureTransfer => { _ = host::future_transfer; todo!() @@ -235,25 +238,18 @@ impl<'a> TrampolineCompiler<'a> { } } - fn translate_lower_import( - &mut self, - index: LoweredIndex, - options: &CanonicalOptions, - lower_ty: TypeFuncIndex, - ) { + fn store_wasm_arguments(&mut self, args: &[Value]) -> (Value, Value) { let pointer_type = self.isa.pointer_type(); - let args = self.builder.func.dfg.block_params(self.block0).to_vec(); - let vmctx = args[0]; - let wasm_func_ty = self.types[self.signature].unwrap_func(); + let wasm_func_ty = 
&self.types[self.signature].unwrap_func(); // Start off by spilling all the wasm arguments into a stack slot to be // passed to the host function. - let (values_vec_ptr, values_vec_len) = match self.abi { + match self.abi { Abi::Wasm => { let (ptr, len) = self.compiler.allocate_stack_array_and_spill_args( wasm_func_ty, &mut self.builder, - &args[2..], + args, ); let len = self.builder.ins().iconst(pointer_type, i64::from(len)); (ptr, len) @@ -262,7 +258,163 @@ impl<'a> TrampolineCompiler<'a> { let params = self.builder.func.dfg.block_params(self.block0); (params[2], params[3]) } - }; + } + } + + fn translate_task_return_call(&mut self) { + match self.abi { + Abi::Wasm => {} + + // These trampolines can only actually be called by Wasm, so + // let's assert that here. + Abi::Array => { + self.builder.ins().trap(TRAP_INTERNAL_ASSERT); + return; + } + } + + let args = self.builder.func.dfg.block_params(self.block0).to_vec(); + let vmctx = args[0]; + + let (values_vec_ptr, values_vec_len) = self.store_wasm_arguments(&args[2..]); + + let (host_sig, index) = host::task_return(self.isa, &mut self.builder.func); + let host_fn = self.load_libcall(vmctx, index); + + let params = self.types[self.signature] + .unwrap_func() + .params() + .iter() + .map(|&v| { + Some(match v { + WasmValType::I32 => FlatType::I32, + WasmValType::I64 => FlatType::I64, + WasmValType::F32 => FlatType::F32, + WasmValType::F64 => FlatType::F64, + _ => return None, + }) + }) + .collect::>(); + + let ty = self.builder.ins().iconst( + ir::types::I32, + i64::from( + params + .and_then(|params| { + self.types + .get_task_return_type(&TypeTaskReturn { params }) + .map(|v| v.as_u32()) + }) + .unwrap_or(u32::MAX), + ), + ); + + let call = self.compiler.call_indirect_host( + &mut self.builder, + index, + host_sig, + host_fn, + &[vmctx, ty, values_vec_ptr, values_vec_len], + ); + let succeeded = self.builder.func.dfg.inst_results(call)[0]; + self.raise_if_host_trapped(succeeded); + self.builder.ins().return_(&[]); + } + + fn translate_async_enter_or_exit( + &mut self, + get_libcall: fn( + &dyn TargetIsa, + &mut ir::Function, + ) -> (ir::SigRef, ComponentBuiltinFunctionIndex), + callback_and_post_return: Option<( + Option, + Option, + )>, + result: ir::types::Type, + ) { + match self.abi { + Abi::Wasm => {} + + // These trampolines can only actually be called by Wasm, so + // let's assert that here. 
+ Abi::Array => { + self.builder.ins().trap(TRAP_INTERNAL_ASSERT); + return; + } + } + + let args = self.builder.func.dfg.block_params(self.block0).to_vec(); + let vmctx = args[0]; + + let (host_sig, index) = get_libcall(self.isa, &mut self.builder.func); + let host_fn = self.load_libcall(vmctx, index); + + let mut callee_args = vec![vmctx]; + + if let Some((callback, post_return)) = callback_and_post_return { + let pointer_type = self.isa.pointer_type(); + + // callback: *mut VMFuncRef + if let Some(callback) = callback { + callee_args.push(self.builder.ins().load( + pointer_type, + MemFlags::trusted(), + vmctx, + i32::try_from(self.offsets.runtime_callback(callback)).unwrap(), + )); + } else { + callee_args.push(self.builder.ins().iconst(pointer_type, 0)); + } + + // post_return: *mut VMFuncRef + if let Some(post_return) = post_return { + callee_args.push(self.builder.ins().load( + pointer_type, + MemFlags::trusted(), + vmctx, + i32::try_from(self.offsets.runtime_post_return(post_return)).unwrap(), + )); + } else { + callee_args.push(self.builder.ins().iconst(pointer_type, 0)); + } + } + + // remaining parameters + callee_args.extend(args[2..].iter().copied()); + + let call = self.compiler.call_indirect_host( + &mut self.builder, + index, + host_sig, + host_fn, + &callee_args, + ); + + if result == ir::types::I64 { + let result = self.builder.func.dfg.inst_results(call)[0]; + let result = self.raise_if_i32_trapped(result); + self.abi_store_results(&[result]); + } else { + assert!(result == ir::types::I8); + let succeeded = self.builder.func.dfg.inst_results(call)[0]; + self.raise_if_host_trapped(succeeded); + self.builder.ins().return_(&[]); + } + } + + fn translate_lower_import( + &mut self, + index: LoweredIndex, + options: &CanonicalOptions, + lower_ty: TypeFuncIndex, + ) { + let pointer_type = self.isa.pointer_type(); + let args = self.builder.func.dfg.block_params(self.block0).to_vec(); + let vmctx = args[0]; + let wasm_func_ty = self.types[self.signature].unwrap_func(); + + let (values_vec_ptr, values_vec_len) = self.store_wasm_arguments(&args[2..]); // Below this will incrementally build both the signature of the host // function we're calling as well as the list of arguments since the @@ -457,7 +609,7 @@ impl<'a> TrampolineCompiler<'a> { ); let call = self.call_libcall(vmctx, host::resource_new32, &host_args); let result = self.builder.func.dfg.inst_results(call)[0]; - let result = self.raise_if_resource_trapped(result); + let result = self.raise_if_i32_trapped(result); self.abi_store_results(&[result]); } @@ -486,7 +638,7 @@ impl<'a> TrampolineCompiler<'a> { ); let call = self.call_libcall(vmctx, host::resource_rep32, &host_args); let result = self.builder.func.dfg.inst_results(call)[0]; - let result = self.raise_if_resource_trapped(result); + let result = self.raise_if_i32_trapped(result); self.abi_store_results(&[result]); } @@ -799,7 +951,7 @@ impl<'a> TrampolineCompiler<'a> { self.raise_if_host_trapped(succeeded); } - fn raise_if_resource_trapped(&mut self, ret: ir::Value) -> ir::Value { + fn raise_if_i32_trapped(&mut self, ret: ir::Value) -> ir::Value { let minus_one = self.builder.ins().iconst(ir::types::I64, -1); let succeeded = self.builder.ins().icmp(IntCC::NotEqual, ret, minus_one); self.raise_if_host_trapped(succeeded); diff --git a/crates/environ/src/component.rs b/crates/environ/src/component.rs index 6988e4db7431..0808e4c0df85 100644 --- a/crates/environ/src/component.rs +++ b/crates/environ/src/component.rs @@ -83,6 +83,10 @@ macro_rules! 
foreach_builtin_component_function { resource_enter_call(vmctx: vmctx); resource_exit_call(vmctx: vmctx) -> bool; + task_return(vmctx: vmctx, ty: u32, storage: ptr_u8, storage_len: size) -> bool; + async_enter(vmctx: vmctx, start: ptr_u8, return_: ptr_u8, caller_instance: u32, task_return_type: u32, params: u32, results: u32) -> bool; + async_exit(vmctx: vmctx, callback: ptr_u8, post_return: ptr_u8, caller_instance: u32, callee: ptr_u8, callee_instance: u32, param_count: u32, result_count: u32, flags: u32) -> u64; + future_transfer(vmctx: vmctx, src_idx: u32, src_table: u32, dst_table: u32) -> u64; stream_transfer(vmctx: vmctx, src_idx: u32, src_table: u32, dst_table: u32) -> u64; error_context_transfer(vmctx: vmctx, src_idx: u32, src_table: u32, dst_table: u32) -> u64; diff --git a/crates/environ/src/fact.rs b/crates/environ/src/fact.rs index 8e7232dce29a..ffd8b4073e8c 100644 --- a/crates/environ/src/fact.rs +++ b/crates/environ/src/fact.rs @@ -21,7 +21,7 @@ use crate::component::dfg::CoreDef; use crate::component::{ Adapter, AdapterOptions as AdapterOptionsDfg, ComponentTypesBuilder, FlatType, InterfaceType, - StringEncoding, Transcode, TypeFuncIndex, + RuntimeComponentInstanceIndex, StringEncoding, Transcode, TypeFuncIndex, }; use crate::fact::transcode::Transcoder; use crate::prelude::*; @@ -64,6 +64,11 @@ pub struct Module<'a> { imported_resource_transfer_borrow: Option, imported_resource_enter_call: Option, imported_resource_exit_call: Option, + imported_async_enter_call: Option, + imported_async_exit_call: Option, + imported_future_transfer: Option, + imported_stream_transfer: Option, + imported_error_context_transfer: Option, // Current status of index spaces from the imports generated so far. imported_funcs: PrimaryMap>, @@ -73,6 +78,11 @@ pub struct Module<'a> { funcs: PrimaryMap, helper_funcs: HashMap, helper_worklist: Vec<(FunctionId, Helper)>, + + globals_by_type: [Vec; 4], + globals: Vec, + + exports: Vec<(u32, String)>, } struct AdapterData { @@ -95,6 +105,7 @@ struct AdapterData { /// These options are typically unique per-adapter and generally aren't needed /// when translating recursive types within an adapter. struct AdapterOptions { + instance: RuntimeComponentInstanceIndex, /// The ascribed type of this adapter. 
ty: TypeFuncIndex, /// The global that represents the instance flags for where this adapter @@ -189,6 +200,14 @@ impl<'a> Module<'a> { imported_resource_transfer_borrow: None, imported_resource_enter_call: None, imported_resource_exit_call: None, + imported_async_enter_call: None, + imported_async_exit_call: None, + imported_future_transfer: None, + imported_stream_transfer: None, + imported_error_context_transfer: None, + globals_by_type: Default::default(), + globals: Default::default(), + exports: Vec::new(), } } @@ -242,6 +261,28 @@ impl<'a> Module<'a> { } } + fn allocate(&mut self, counts: &mut [usize; 4], ty: ValType) -> u32 { + let which = match ty { + ValType::I32 => 0, + ValType::I64 => 1, + ValType::F32 => 2, + ValType::F64 => 3, + _ => unreachable!(), + }; + + let index = counts[which]; + counts[which] += 1; + + if let Some(offset) = self.globals_by_type[which].get(index) { + *offset + } else { + let offset = u32::try_from(self.globals.len()).unwrap(); + self.globals_by_type[which].push(offset); + self.globals.push(ty); + offset + } + } + fn import_options(&mut self, ty: TypeFuncIndex, options: &AdapterOptionsDfg) -> AdapterOptions { let AdapterOptionsDfg { instance, @@ -311,6 +352,7 @@ impl<'a> Module<'a> { }); AdapterOptions { + instance: *instance, ty, flags, post_return: None, @@ -421,6 +463,89 @@ impl<'a> Module<'a> { idx } + fn import_async_enter_call(&mut self) -> FuncIndex { + self.import_simple( + "async", + "enter-call", + &[ + ValType::FUNCREF, + ValType::FUNCREF, + ValType::I32, + ValType::I32, + ValType::I32, + ValType::I32, + ], + &[], + Import::AsyncEnterCall, + |me| &mut me.imported_async_enter_call, + ) + } + + fn import_async_exit_call( + &mut self, + callback: Option, + post_return: Option, + ) -> FuncIndex { + self.import_simple( + "async", + "exit-call", + &[ + ValType::I32, + ValType::FUNCREF, + ValType::I32, + ValType::I32, + ValType::I32, + ValType::I32, + ], + &[ValType::I32], + Import::AsyncExitCall { + callback: callback + .map(|callback| self.imported_funcs.get(callback).unwrap().clone().unwrap()), + post_return: post_return.map(|post_return| { + self.imported_funcs + .get(post_return) + .unwrap() + .clone() + .unwrap() + }), + }, + |me| &mut me.imported_async_exit_call, + ) + } + + fn import_future_transfer(&mut self) -> FuncIndex { + self.import_simple( + "future", + "transfer", + &[ValType::I32; 3], + &[ValType::I32], + Import::FutureTransfer, + |me| &mut me.imported_future_transfer, + ) + } + + fn import_stream_transfer(&mut self) -> FuncIndex { + self.import_simple( + "stream", + "transfer", + &[ValType::I32; 3], + &[ValType::I32], + Import::StreamTransfer, + |me| &mut me.imported_stream_transfer, + ) + } + + fn import_error_context_transfer(&mut self) -> FuncIndex { + self.import_simple( + "error-context", + "transfer", + &[ValType::I32; 3], + &[ValType::I32], + Import::ErrorContextTransfer, + |me| &mut me.imported_error_context_transfer, + ) + } + fn import_resource_transfer_own(&mut self) -> FuncIndex { self.import_simple( "resource", @@ -496,6 +621,11 @@ impl<'a> Module<'a> { exports.export(name, ExportKind::Func, idx.as_u32()); } } + for (idx, name) in &self.exports { + exports.export(name, ExportKind::Func, *idx); + } + + let imported_global_count = u32::try_from(self.imported_globals.len()).unwrap(); // With all functions numbered the fragments of the body of each // function can be assigned into one final adapter function. 
@@ -528,6 +658,15 @@ impl<'a> Module<'a> { Body::Call(id) => { Instruction::Call(id_to_index[*id].as_u32()).encode(&mut body); } + Body::RefFunc(id) => { + Instruction::RefFunc(id_to_index[*id].as_u32()).encode(&mut body); + } + Body::GlobalGet(offset) => { + Instruction::GlobalGet(offset + imported_global_count).encode(&mut body); + } + Body::GlobalSet(offset) => { + Instruction::GlobalSet(offset + imported_global_count).encode(&mut body); + } } } code.raw(&body); @@ -536,10 +675,29 @@ impl<'a> Module<'a> { let traps = traps.finish(); + let mut globals = GlobalSection::new(); + for ty in &self.globals { + globals.global( + GlobalType { + val_type: *ty, + mutable: true, + shared: false, + }, + &match ty { + ValType::I32 => ConstExpr::i32_const(0), + ValType::I64 => ConstExpr::i64_const(0), + ValType::F32 => ConstExpr::f32_const(0_f32), + ValType::F64 => ConstExpr::f64_const(0_f64), + _ => unreachable!(), + }, + ); + } + let mut result = wasm_encoder::Module::new(); result.section(&self.core_types.section); result.section(&self.core_imports); result.section(&funcs); + result.section(&globals); result.section(&exports); result.section(&code); if self.debug { @@ -704,6 +862,9 @@ struct Function { enum Body { Raw(Vec, Vec<(usize, traps::Trap)>), Call(FunctionId), + RefFunc(FunctionId), + GlobalGet(u32), + GlobalSet(u32), } impl Function { diff --git a/crates/environ/src/fact/signature.rs b/crates/environ/src/fact/signature.rs index 328ec085e359..899fc8e1b4a7 100644 --- a/crates/environ/src/fact/signature.rs +++ b/crates/environ/src/fact/signature.rs @@ -13,6 +13,14 @@ pub struct Signature { pub params: Vec, /// Core wasm results. pub results: Vec, + /// Indicator to whether parameters are indirect, meaning that the first + /// entry of `params` is a pointer type which all parameters are loaded + /// through. + pub params_indirect: bool, + /// Indicator whether results are passed indirectly. This may mean that + /// `results` is an `i32` or that `params` ends with an `i32` depending on + /// the `Context`. + pub results_indirect: bool, } impl ComponentTypesBuilder { @@ -26,6 +34,16 @@ impl ComponentTypesBuilder { let ty = &self[options.ty]; let ptr_ty = options.options.ptr(); + if let (Context::Lower, true) = (&context, options.options.async_) { + return Signature { + params: vec![ptr_ty; 2], + results: vec![ValType::I32], + params_indirect: true, + results_indirect: true, + }; + } + + let mut params_indirect = false; let mut params = match self.flatten_types( &options.options, MAX_FLAT_PARAMS, @@ -33,10 +51,25 @@ impl ComponentTypesBuilder { ) { Some(list) => list, None => { + params_indirect = true; vec![ptr_ty] } }; + if options.options.async_ { + return Signature { + params, + results: if options.options.callback.is_some() { + vec![ptr_ty] + } else { + Vec::new() + }, + params_indirect, + results_indirect: false, + }; + } + + let mut results_indirect = false; let results = match self.flatten_types( &options.options, MAX_FLAT_RESULTS, @@ -44,6 +77,7 @@ impl ComponentTypesBuilder { ) { Some(list) => list, None => { + results_indirect = true; match context { // For a lifted function too-many-results gets translated to a // returned pointer where results are read from. 
The callee @@ -59,7 +93,70 @@ impl ComponentTypesBuilder { } } }; - Signature { params, results } + Signature { + params, + results, + params_indirect, + results_indirect, + } + } + + pub(super) fn async_start_signature(&self, options: &AdapterOptions) -> Signature { + let ty = &self[options.ty]; + let ptr_ty = options.options.ptr(); + + let mut params = vec![ptr_ty]; + + let mut results_indirect = false; + let results = match self.flatten_types( + &options.options, + MAX_FLAT_PARAMS, + self[ty.params].types.iter().copied(), + ) { + Some(list) => list, + None => { + results_indirect = true; + params.push(ptr_ty); + Vec::new() + } + }; + Signature { + params, + results, + params_indirect: false, + results_indirect, + } + } + + pub(super) fn async_return_signature(&self, options: &AdapterOptions) -> Signature { + let ty = &self[options.ty]; + let ptr_ty = options.options.ptr(); + + let mut params_indirect = false; + let mut params = match self.flatten_types( + &options.options, + if options.options.async_ { + MAX_FLAT_PARAMS + } else { + MAX_FLAT_RESULTS + }, + self[ty.results].types.iter().copied(), + ) { + Some(list) => list, + None => { + params_indirect = true; + vec![ptr_ty] + } + }; + // Add return pointer + params.push(ptr_ty); + + Signature { + params, + results: Vec::new(), + params_indirect, + results_indirect: false, + } } /// Pushes the flat version of a list of component types into a final result diff --git a/crates/environ/src/fact/trampoline.rs b/crates/environ/src/fact/trampoline.rs index b96ac5875e07..7dd0a772d239 100644 --- a/crates/environ/src/fact/trampoline.rs +++ b/crates/environ/src/fact/trampoline.rs @@ -17,9 +17,11 @@ use crate::component::{ CanonicalAbiInfo, ComponentTypesBuilder, FixedEncoding as FE, FlatType, InterfaceType, - StringEncoding, Transcode, TypeEnumIndex, TypeFlagsIndex, TypeListIndex, TypeOptionIndex, - TypeRecordIndex, TypeResourceTableIndex, TypeResultIndex, TypeTupleIndex, TypeVariantIndex, - VariantInfo, FLAG_MAY_ENTER, FLAG_MAY_LEAVE, MAX_FLAT_PARAMS, MAX_FLAT_RESULTS, + StringEncoding, Transcode, TypeComponentLocalErrorContextTableIndex, TypeEnumIndex, + TypeFlagsIndex, TypeFutureTableIndex, TypeListIndex, TypeOptionIndex, TypeRecordIndex, + TypeResourceTableIndex, TypeResultIndex, TypeStreamTableIndex, TypeTupleIndex, + TypeVariantIndex, VariantInfo, FLAG_MAY_ENTER, FLAG_MAY_LEAVE, MAX_FLAT_PARAMS, + MAX_FLAT_RESULTS, }; use crate::fact::signature::Signature; use crate::fact::transcode::Transcoder; @@ -39,6 +41,9 @@ use wasmtime_component_util::{DiscriminantSize, FlagsSize}; const MAX_STRING_BYTE_LENGTH: u32 = 1 << 31; const UTF16_TAG: u32 = 1 << 31; +const EXIT_FLAG_ASYNC_CALLER: i32 = 1 << 0; +const EXIT_FLAG_ASYNC_CALLEE: i32 = 1 << 1; + /// This value is arbitrarily chosen and should be fine to change at any time, /// it just seemed like a halfway reasonable starting point. 
const INITIAL_FUEL: usize = 1_000; @@ -80,50 +85,168 @@ struct Compiler<'a, 'b> { } pub(super) fn compile(module: &mut Module<'_>, adapter: &AdapterData) { + fn compiler<'a, 'b>( + module: &'b mut Module<'a>, + adapter: &AdapterData, + ) -> (Compiler<'a, 'b>, Signature, Signature) { + let lower_sig = module.types.signature(&adapter.lower, Context::Lower); + let lift_sig = module.types.signature(&adapter.lift, Context::Lift); + let ty = module + .core_types + .function(&lower_sig.params, &lower_sig.results); + let result = module + .funcs + .push(Function::new(Some(adapter.name.clone()), ty)); + + // If this type signature contains any borrowed resources then invocations + // of enter/exit call for resource-related metadata tracking must be used. + // It shouldn't matter whether the lower/lift signature is used here as both + // should return the same answer. + let emit_resource_call = module.types.contains_borrow_resource(&adapter.lower); + assert_eq!( + emit_resource_call, + module.types.contains_borrow_resource(&adapter.lift) + ); + + ( + Compiler { + types: module.types, + module, + code: Vec::new(), + nlocals: lower_sig.params.len() as u32, + free_locals: HashMap::new(), + traps: Vec::new(), + result, + fuel: INITIAL_FUEL, + emit_resource_call, + }, + lower_sig, + lift_sig, + ) + } + + let start_adapter = |module: &mut Module, param_globals| { + let sig = module.types.async_start_signature(&adapter.lift); + let ty = module.core_types.function(&sig.params, &sig.results); + let result = module.funcs.push(Function::new( + Some(format!("[async-start]{}", adapter.name)), + ty, + )); + + Compiler { + types: module.types, + module, + code: Vec::new(), + nlocals: sig.params.len() as u32, + free_locals: HashMap::new(), + traps: Vec::new(), + result, + fuel: INITIAL_FUEL, + emit_resource_call: false, + } + .compile_async_start_adapter(adapter, &sig, param_globals); + + result + }; + + let return_adapter = |module: &mut Module, result_globals| { + let sig = module.types.async_return_signature(&adapter.lift); + let ty = module.core_types.function(&sig.params, &sig.results); + let result = module.funcs.push(Function::new( + Some(format!("[async-return]{}", adapter.name)), + ty, + )); + + Compiler { + types: module.types, + module, + code: Vec::new(), + nlocals: sig.params.len() as u32, + free_locals: HashMap::new(), + traps: Vec::new(), + result, + fuel: INITIAL_FUEL, + emit_resource_call: false, + } + .compile_async_return_adapter(adapter, &sig, result_globals); + + result + }; + match (adapter.lower.options.async_, adapter.lift.options.async_) { - (false, false) => {} + (false, false) => { + let (compiler, lower_sig, lift_sig) = compiler(module, adapter); + compiler.compile_sync_to_sync_adapter(adapter, &lower_sig, &lift_sig) + } (true, true) => { - todo!() + let start = start_adapter(module, None); + let return_ = return_adapter(module, None); + let (compiler, _, lift_sig) = compiler(module, adapter); + compiler.compile_async_to_async_adapter( + adapter, + start, + return_, + i32::try_from(lift_sig.params.len()).unwrap(), + ); } (false, true) => { - todo!() + let lower_sig = module.types.signature(&adapter.lower, Context::Lower); + let param_globals = if lower_sig.params_indirect { + None + } else { + let mut counts = [0; 4]; + Some( + lower_sig + .params + .iter() + .take(if lower_sig.results_indirect { + lower_sig.params.len() - 1 + } else { + lower_sig.params.len() + }) + .map(|ty| module.allocate(&mut counts, *ty)) + .collect::>(), + ) + }; + let result_globals = if 
lower_sig.results_indirect { + None + } else { + let mut counts = [0; 4]; + Some( + lower_sig + .results + .iter() + .map(|ty| module.allocate(&mut counts, *ty)) + .collect::>(), + ) + }; + + let start = start_adapter(module, param_globals.as_deref()); + let return_ = return_adapter(module, result_globals.as_deref()); + let (compiler, _, lift_sig) = compiler(module, adapter); + compiler.compile_sync_to_async_adapter( + adapter, + start, + return_, + i32::try_from(lift_sig.params.len()).unwrap(), + param_globals.as_deref(), + result_globals.as_deref(), + ); } (true, false) => { - todo!() - } - } - - let lower_sig = module.types.signature(&adapter.lower, Context::Lower); - let lift_sig = module.types.signature(&adapter.lift, Context::Lift); - let ty = module - .core_types - .function(&lower_sig.params, &lower_sig.results); - let result = module - .funcs - .push(Function::new(Some(adapter.name.clone()), ty)); - - // If this type signature contains any borrowed resources then invocations - // of enter/exit call for resource-related metadata tracking must be used. - // It shouldn't matter whether the lower/lift signature is used here as both - // should return the same answer. - let emit_resource_call = module.types.contains_borrow_resource(&adapter.lower); - assert_eq!( - emit_resource_call, - module.types.contains_borrow_resource(&adapter.lift) - ); - - Compiler { - types: module.types, - module, - code: Vec::new(), - nlocals: lower_sig.params.len() as u32, - free_locals: HashMap::new(), - traps: Vec::new(), - result, - fuel: INITIAL_FUEL, - emit_resource_call, + let lift_sig = module.types.signature(&adapter.lift, Context::Lift); + let start = start_adapter(module, None); + let return_ = return_adapter(module, None); + let (compiler, ..) = compiler(module, adapter); + compiler.compile_async_to_sync_adapter( + adapter, + start, + return_, + i32::try_from(lift_sig.params.len()).unwrap(), + i32::try_from(lift_sig.results.len()).unwrap(), + ); + } } - .compile_adapter(adapter, &lower_sig, &lift_sig) } /// Compiles a helper function as specified by the `Helper` configuration. @@ -257,7 +380,294 @@ struct Memory<'a> { } impl Compiler<'_, '_> { - fn compile_adapter( + fn compile_async_to_async_adapter( + mut self, + adapter: &AdapterData, + start: FunctionId, + return_: FunctionId, + param_count: i32, + ) { + let enter = self.module.import_async_enter_call(); + let exit = self + .module + .import_async_exit_call(adapter.lift.options.callback, None); + + self.flush_code(); + self.module.funcs[self.result] + .body + .push(Body::RefFunc(start)); + self.module.funcs[self.result] + .body + .push(Body::RefFunc(return_)); + self.instruction(I32Const( + i32::try_from(adapter.lower.instance.as_u32()).unwrap(), + )); + self.instruction(I32Const( + i32::try_from({ + let ty = &self.types[adapter.lift.ty]; + if adapter.lift.options.memory64 { + ty.task_return_type64.as_u32() + } else { + ty.task_return_type32.as_u32() + } + }) + .unwrap(), + )); + self.instruction(LocalGet(0)); + self.instruction(LocalGet(1)); + self.instruction(Call(enter.as_u32())); + + // TODO: As an optimization, consider checking the backpressure flag on the callee instance and, if it's + // unset _and_ the callee uses a callback, translate the params and call the callee function directly here + // (and make sure `exit` knows _not_ to call it in that case). 
+ + self.module.exports.push(( + adapter.callee.as_u32(), + format!("[adapter-callee]{}", adapter.name), + )); + + self.instruction(I32Const( + i32::try_from(adapter.lower.instance.as_u32()).unwrap(), + )); + self.instruction(RefFunc(adapter.callee.as_u32())); + self.instruction(I32Const( + i32::try_from(adapter.lift.instance.as_u32()).unwrap(), + )); + self.instruction(I32Const(param_count)); + self.instruction(I32Const(1)); // leave room for the guest context result + self.instruction(I32Const(EXIT_FLAG_ASYNC_CALLER | EXIT_FLAG_ASYNC_CALLEE)); + self.instruction(Call(exit.as_u32())); + + self.finish() + } + + fn compile_sync_to_async_adapter( + mut self, + adapter: &AdapterData, + start: FunctionId, + return_: FunctionId, + param_count: i32, + param_globals: Option<&[u32]>, + result_globals: Option<&[u32]>, + ) { + let enter = self.module.import_async_enter_call(); + let exit = self + .module + .import_async_exit_call(adapter.lift.options.callback, None); + + self.flush_code(); + self.module.funcs[self.result] + .body + .push(Body::RefFunc(start)); + self.module.funcs[self.result] + .body + .push(Body::RefFunc(return_)); + self.instruction(I32Const( + i32::try_from(adapter.lower.instance.as_u32()).unwrap(), + )); + self.instruction(I32Const( + i32::try_from({ + let ty = &self.types[adapter.lift.ty]; + if adapter.lift.options.memory64 { + ty.task_return_type64.as_u32() + } else { + ty.task_return_type32.as_u32() + } + }) + .unwrap(), + )); + + let results_local = if let Some(globals) = param_globals { + for (local, global) in globals.iter().enumerate() { + self.instruction(LocalGet(u32::try_from(local).unwrap())); + self.flush_code(); + self.module.funcs[self.result] + .body + .push(Body::GlobalSet(*global)); + } + self.instruction(I32Const(0)); // dummy params pointer + u32::try_from(globals.len()).unwrap() + } else { + self.instruction(LocalGet(0)); + 1 + }; + + if result_globals.is_some() { + self.instruction(I32Const(0)); // dummy results pointer + } else { + self.instruction(LocalGet(results_local)); + } + + self.instruction(Call(enter.as_u32())); + + // TODO: As an optimization, consider checking the backpressure flag on the callee instance and, if it's + // unset _and_ the callee uses a callback, translate the params and call the callee function directly here + // (and make sure `exit` knows _not_ to call it in that case). 
+ + self.module.exports.push(( + adapter.callee.as_u32(), + format!("[adapter-callee]{}", adapter.name), + )); + self.instruction(I32Const( + i32::try_from(adapter.lower.instance.as_u32()).unwrap(), + )); + self.instruction(RefFunc(adapter.callee.as_u32())); + self.instruction(I32Const( + i32::try_from(adapter.lift.instance.as_u32()).unwrap(), + )); + self.instruction(I32Const(param_count)); + self.instruction(I32Const(1)); // leave room for the guest context result + self.instruction(I32Const(EXIT_FLAG_ASYNC_CALLEE)); + self.instruction(Call(exit.as_u32())); + self.instruction(Drop); + + if let Some(globals) = result_globals { + self.flush_code(); + for global in globals { + self.module.funcs[self.result] + .body + .push(Body::GlobalGet(*global)); + } + } + + self.finish() + } + + fn compile_async_to_sync_adapter( + mut self, + adapter: &AdapterData, + start: FunctionId, + return_: FunctionId, + param_count: i32, + result_count: i32, + ) { + let enter = self.module.import_async_enter_call(); + let exit = self + .module + .import_async_exit_call(None, adapter.lift.post_return); + + self.flush_code(); + self.module.funcs[self.result] + .body + .push(Body::RefFunc(start)); + self.module.funcs[self.result] + .body + .push(Body::RefFunc(return_)); + self.instruction(I32Const( + i32::try_from(adapter.lower.instance.as_u32()).unwrap(), + )); + self.instruction(I32Const( + i32::try_from({ + let ty = &self.types[adapter.lift.ty]; + if adapter.lift.options.memory64 { + ty.task_return_type64.as_u32() + } else { + ty.task_return_type32.as_u32() + } + }) + .unwrap(), + )); + self.instruction(LocalGet(0)); + self.instruction(LocalGet(1)); + self.instruction(Call(enter.as_u32())); + self.module.exports.push(( + adapter.callee.as_u32(), + format!("[adapter-callee]{}", adapter.name), + )); + self.instruction(I32Const( + i32::try_from(adapter.lower.instance.as_u32()).unwrap(), + )); + self.instruction(RefFunc(adapter.callee.as_u32())); + self.instruction(I32Const( + i32::try_from(adapter.lift.instance.as_u32()).unwrap(), + )); + self.instruction(I32Const(param_count)); + self.instruction(I32Const(result_count)); + self.instruction(I32Const(EXIT_FLAG_ASYNC_CALLER)); + self.instruction(Call(exit.as_u32())); + + self.finish() + } + + fn compile_async_start_adapter( + mut self, + adapter: &AdapterData, + sig: &Signature, + param_globals: Option<&[u32]>, + ) { + let mut temps = Vec::new(); + let param_locals = if let Some(globals) = param_globals { + for global in globals { + let ty = self.module.globals[usize::try_from(*global).unwrap()]; + + self.flush_code(); + self.module.funcs[self.result] + .body + .push(Body::GlobalGet(*global)); + temps.push(self.local_set_new_tmp(ty)); + } + temps + .iter() + .map(|t| (t.idx, t.ty)) + .chain(if sig.results_indirect { + sig.params + .iter() + .enumerate() + .map(|(i, ty)| (i as u32, *ty)) + .last() + } else { + None + }) + .collect::>() + } else { + sig.params + .iter() + .enumerate() + .map(|(i, ty)| (i as u32, *ty)) + .collect::>() + }; + + self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, false); + self.translate_params(adapter, ¶m_locals); + self.set_flag(adapter.lift.flags, FLAG_MAY_LEAVE, true); + + for tmp in temps { + self.free_temp_local(tmp); + } + + self.finish(); + } + + fn compile_async_return_adapter( + mut self, + adapter: &AdapterData, + sig: &Signature, + result_globals: Option<&[u32]>, + ) { + let param_locals = sig + .params + .iter() + .enumerate() + .map(|(i, ty)| (i as u32, *ty)) + .collect::>(); + + self.set_flag(adapter.lower.flags, 
FLAG_MAY_LEAVE, false); + self.translate_results(adapter, ¶m_locals, ¶m_locals); + self.set_flag(adapter.lower.flags, FLAG_MAY_LEAVE, true); + + if let Some(globals) = result_globals { + self.flush_code(); + for global in globals { + self.module.funcs[self.result] + .body + .push(Body::GlobalSet(*global)); + } + } + + self.finish() + } + + fn compile_sync_to_sync_adapter( mut self, adapter: &AdapterData, lower_sig: &Signature, @@ -375,9 +785,12 @@ impl Compiler<'_, '_> { // TODO: handle subtyping assert_eq!(src_tys.len(), dst_tys.len()); - let src_flat = + let src_flat = if adapter.lower.options.async_ { + None + } else { self.types - .flatten_types(lower_opts, MAX_FLAT_PARAMS, src_tys.iter().copied()); + .flatten_types(lower_opts, MAX_FLAT_PARAMS, src_tys.iter().copied()) + }; let dst_flat = self.types .flatten_types(lift_opts, MAX_FLAT_PARAMS, dst_tys.iter().copied()); @@ -404,16 +817,28 @@ impl Compiler<'_, '_> { let dst = if let Some(flat) = &dst_flat { Destination::Stack(flat, lift_opts) } else { - // If there are too many parameters then space is allocated in the - // destination module for the parameters via its `realloc` function. - let abi = CanonicalAbiInfo::record(dst_tys.iter().map(|t| self.types.canonical_abi(t))); - let (size, align) = if lift_opts.memory64 { - (abi.size64, abi.align64) + if lift_opts.async_ { + let align = dst_tys + .iter() + .map(|t| self.types.align(lift_opts, t)) + .max() + .unwrap_or(1); + let (addr, ty) = *param_locals.last().expect("no retptr"); + assert_eq!(ty, lift_opts.ptr()); + Destination::Memory(self.memory_operand(lift_opts, TempLocal::new(addr, ty), align)) } else { - (abi.size32, abi.align32) - }; - let size = MallocSize::Const(size); - Destination::Memory(self.malloc(lift_opts, size, align)) + // If there are too many parameters then space is allocated in the + // destination module for the parameters via its `realloc` function. + let abi = + CanonicalAbiInfo::record(dst_tys.iter().map(|t| self.types.canonical_abi(t))); + let (size, align) = if lift_opts.memory64 { + (abi.size64, abi.align64) + } else { + (abi.size32, abi.align32) + }; + let size = MallocSize::Const(size); + Destination::Memory(self.malloc(lift_opts, size, align)) + } }; let srcs = src @@ -429,7 +854,7 @@ impl Compiler<'_, '_> { // If the destination was linear memory instead of the stack then the // actual parameter that we're passing is the address of the values // stored, so ensure that's happening in the wasm body here. 
- if let Destination::Memory(mem) = dst { + if let (Destination::Memory(mem), false) = (dst, lift_opts.async_) { self.instruction(LocalGet(mem.addr.idx)); self.free_temp_local(mem.addr); } @@ -456,12 +881,21 @@ impl Compiler<'_, '_> { let lift_opts = &adapter.lift.options; let lower_opts = &adapter.lower.options; - let src_flat = - self.types - .flatten_types(lift_opts, MAX_FLAT_RESULTS, src_tys.iter().copied()); - let dst_flat = + let src_flat = self.types.flatten_types( + lift_opts, + if lift_opts.async_ { + MAX_FLAT_PARAMS + } else { + MAX_FLAT_RESULTS + }, + src_tys.iter().copied(), + ); + let dst_flat = if lower_opts.async_ { + None + } else { self.types - .flatten_types(lower_opts, MAX_FLAT_RESULTS, dst_tys.iter().copied()); + .flatten_types(lower_opts, MAX_FLAT_RESULTS, dst_tys.iter().copied()) + }; let src = if src_flat.is_some() { Source::Stack(Stack { @@ -478,7 +912,7 @@ impl Compiler<'_, '_> { .map(|t| self.types.align(lift_opts, t)) .max() .unwrap_or(1); - assert_eq!(result_locals.len(), 1); + assert_eq!(result_locals.len(), if lower_opts.async_ { 2 } else { 1 }); let (addr, ty) = result_locals[0]; assert_eq!(ty, lift_opts.ptr()); Source::Memory(self.memory_operand(lift_opts, TempLocal::new(addr, ty), align)) @@ -600,13 +1034,11 @@ impl Compiler<'_, '_> { InterfaceType::Option(_) | InterfaceType::Result(_) => 2, // TODO(#6696) - something nonzero, is 1 right? - InterfaceType::Own(_) | InterfaceType::Borrow(_) => 1, - - InterfaceType::Future(_) + InterfaceType::Own(_) + | InterfaceType::Borrow(_) + | InterfaceType::Future(_) | InterfaceType::Stream(_) - | InterfaceType::ErrorContext(_) => { - todo!() - } + | InterfaceType::ErrorContext(_) => 1, }; match self.fuel.checked_sub(cost) { @@ -641,10 +1073,10 @@ impl Compiler<'_, '_> { InterfaceType::Result(t) => self.translate_result(*t, src, dst_ty, dst), InterfaceType::Own(t) => self.translate_own(*t, src, dst_ty, dst), InterfaceType::Borrow(t) => self.translate_borrow(*t, src, dst_ty, dst), - InterfaceType::Future(_) - | InterfaceType::Stream(_) - | InterfaceType::ErrorContext(_) => { - todo!() + InterfaceType::Future(t) => self.translate_future(*t, src, dst_ty, dst), + InterfaceType::Stream(t) => self.translate_stream(*t, src, dst_ty, dst), + InterfaceType::ErrorContext(t) => { + self.translate_error_context(*t, src, dst_ty, dst) } } } @@ -2472,6 +2904,51 @@ impl Compiler<'_, '_> { } } + fn translate_future( + &mut self, + src_ty: TypeFutureTableIndex, + src: &Source<'_>, + dst_ty: &InterfaceType, + dst: &Destination, + ) { + let dst_ty = match dst_ty { + InterfaceType::Future(t) => *t, + _ => panic!("expected a `Future`"), + }; + let transfer = self.module.import_future_transfer(); + self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer); + } + + fn translate_stream( + &mut self, + src_ty: TypeStreamTableIndex, + src: &Source<'_>, + dst_ty: &InterfaceType, + dst: &Destination, + ) { + let dst_ty = match dst_ty { + InterfaceType::Stream(t) => *t, + _ => panic!("expected a `Stream`"), + }; + let transfer = self.module.import_stream_transfer(); + self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer); + } + + fn translate_error_context( + &mut self, + src_ty: TypeComponentLocalErrorContextTableIndex, + src: &Source<'_>, + dst_ty: &InterfaceType, + dst: &Destination, + ) { + let dst_ty = match dst_ty { + InterfaceType::ErrorContext(t) => *t, + _ => panic!("expected an `ErrorContext`"), + }; + let transfer = self.module.import_error_context_transfer(); + self.translate_handle(src_ty.as_u32(), 
src, dst_ty.as_u32(), dst, transfer); + } + fn translate_own( &mut self, src_ty: TypeResourceTableIndex, @@ -2484,7 +2961,7 @@ impl Compiler<'_, '_> { _ => panic!("expected an `Own`"), }; let transfer = self.module.import_resource_transfer_own(); - self.translate_resource(src_ty, src, dst_ty, dst, transfer); + self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer); } fn translate_borrow( @@ -2500,7 +2977,7 @@ impl Compiler<'_, '_> { }; let transfer = self.module.import_resource_transfer_borrow(); - self.translate_resource(src_ty, src, dst_ty, dst, transfer); + self.translate_handle(src_ty.as_u32(), src, dst_ty.as_u32(), dst, transfer); } /// Translates the index `src`, which resides in the table `src_ty`, into @@ -2510,11 +2987,11 @@ impl Compiler<'_, '_> { /// cranelift-generated trampoline to satisfy this import will call. The /// `transfer` function is an imported function which takes the src, src_ty, /// and dst_ty, and returns the dst index. - fn translate_resource( + fn translate_handle( &mut self, - src_ty: TypeResourceTableIndex, + src_ty: u32, src: &Source<'_>, - dst_ty: TypeResourceTableIndex, + dst_ty: u32, dst: &Destination, transfer: FuncIndex, ) { @@ -2523,8 +3000,8 @@ impl Compiler<'_, '_> { Source::Memory(mem) => self.i32_load(mem), Source::Stack(stack) => self.stack_get(stack, ValType::I32), } - self.instruction(I32Const(src_ty.as_u32() as i32)); - self.instruction(I32Const(dst_ty.as_u32() as i32)); + self.instruction(I32Const(src_ty as i32)); + self.instruction(I32Const(dst_ty as i32)); self.instruction(Call(transfer.as_u32())); match dst { Destination::Memory(mem) => self.i32_store(mem), diff --git a/crates/wasmtime/src/runtime/store.rs b/crates/wasmtime/src/runtime/store.rs index edb9260dc2fb..c400dad359b9 100644 --- a/crates/wasmtime/src/runtime/store.rs +++ b/crates/wasmtime/src/runtime/store.rs @@ -2549,7 +2549,67 @@ impl AsyncCx { } } +unsafe impl crate::runtime::vm::VMComponentAsyncStore for StoreInner { + fn task_return( + &mut self, + ty: wasmtime_environ::component::TypeTaskReturnIndex, + storage: *mut ValRaw, + storage_len: usize, + ) -> Result<()> { + _ = (ty, storage, storage_len); + todo!() + } + + fn async_enter( + &mut self, + start: *mut VMFuncRef, + return_: *mut VMFuncRef, + caller_instance: wasmtime_environ::component::RuntimeComponentInstanceIndex, + task_return_type: wasmtime_environ::component::TypeTaskReturnIndex, + params: u32, + results: u32, + ) -> Result<()> { + _ = ( + start, + return_, + caller_instance, + task_return_type, + params, + results, + ); + todo!() + } + + fn async_exit( + &mut self, + callback: *mut VMFuncRef, + post_return: *mut VMFuncRef, + caller_instance: wasmtime_environ::component::RuntimeComponentInstanceIndex, + callee: *mut VMFuncRef, + callee_instance: wasmtime_environ::component::RuntimeComponentInstanceIndex, + param_count: u32, + result_count: u32, + flags: u32, + ) -> Result { + _ = ( + callback, + post_return, + caller_instance, + callee, + callee_instance, + param_count, + result_count, + flags, + ); + todo!() + } +} + unsafe impl crate::runtime::vm::VMStore for StoreInner { + fn component_async_store(&mut self) -> &mut dyn crate::runtime::vm::VMComponentAsyncStore { + self + } + fn store_opaque(&self) -> &StoreOpaque { &self.inner } diff --git a/crates/wasmtime/src/runtime/vm.rs b/crates/wasmtime/src/runtime/vm.rs index 57c0a54269cf..61ff3998df3b 100644 --- a/crates/wasmtime/src/runtime/vm.rs +++ b/crates/wasmtime/src/runtime/vm.rs @@ -116,6 +116,37 @@ cfg_if::cfg_if! 
{ } } +pub unsafe trait VMComponentAsyncStore { + fn task_return( + &mut self, + ty: wasmtime_environ::component::TypeTaskReturnIndex, + storage: *mut ValRaw, + storage_len: usize, + ) -> Result<()>; + + fn async_enter( + &mut self, + start: *mut VMFuncRef, + return_: *mut VMFuncRef, + caller_instance: wasmtime_environ::component::RuntimeComponentInstanceIndex, + task_return_type: wasmtime_environ::component::TypeTaskReturnIndex, + params: u32, + results: u32, + ) -> Result<()>; + + fn async_exit( + &mut self, + callback: *mut VMFuncRef, + post_return: *mut VMFuncRef, + caller_instance: wasmtime_environ::component::RuntimeComponentInstanceIndex, + callee: *mut VMFuncRef, + callee_instance: wasmtime_environ::component::RuntimeComponentInstanceIndex, + param_count: u32, + result_count: u32, + flags: u32, + ) -> Result; +} + /// Dynamic runtime functionality needed by this crate throughout the execution /// of a wasm instance. /// @@ -189,6 +220,8 @@ pub unsafe trait VMStore { /// Metadata required for resources for the component model. #[cfg(feature = "component-model")] fn component_calls(&mut self) -> &mut component::CallContexts; + + fn component_async_store(&mut self) -> &mut dyn VMComponentAsyncStore; } impl Deref for dyn VMStore + '_ { diff --git a/crates/wasmtime/src/runtime/vm/component/libcalls.rs b/crates/wasmtime/src/runtime/vm/component/libcalls.rs index 4932a479fa2f..5e16523e74ad 100644 --- a/crates/wasmtime/src/runtime/vm/component/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/component/libcalls.rs @@ -3,11 +3,15 @@ use crate::prelude::*; use crate::runtime::vm::component::{ComponentInstance, VMComponentContext}; use crate::runtime::vm::{HostResultHasUnwindSentinel, VmSafe}; +use crate::vm::VMFuncRef; +use crate::ValRaw; use core::cell::Cell; use core::convert::Infallible; use core::ptr::NonNull; use core::slice; -use wasmtime_environ::component::TypeResourceTableIndex; +use wasmtime_environ::component::{ + RuntimeComponentInstanceIndex, TypeResourceTableIndex, TypeTaskReturnIndex, +}; const UTF16_TAG: usize = 1 << 31; @@ -572,6 +576,67 @@ unsafe fn trap(_vmctx: NonNull, code: u8) -> Result, + ty: u32, + storage: *mut u8, + storage_len: usize, +) -> Result<()> { + ComponentInstance::from_vmctx(vmctx, |instance| { + (*instance.store()).component_async_store().task_return( + TypeTaskReturnIndex::from_u32(ty), + storage.cast::(), + storage_len, + ) + }) +} + +unsafe fn async_enter( + vmctx: NonNull, + start: *mut u8, + return_: *mut u8, + caller_instance: u32, + task_return_type: u32, + params: u32, + results: u32, +) -> Result<()> { + ComponentInstance::from_vmctx(vmctx, |instance| { + (*instance.store()).component_async_store().async_enter( + start.cast::(), + return_.cast::(), + RuntimeComponentInstanceIndex::from_u32(caller_instance), + TypeTaskReturnIndex::from_u32(task_return_type), + params, + results, + ) + }) +} + +unsafe fn async_exit( + vmctx: NonNull, + callback: *mut u8, + post_return: *mut u8, + caller_instance: u32, + callee: *mut u8, + callee_instance: u32, + param_count: u32, + result_count: u32, + flags: u32, +) -> Result { + ComponentInstance::from_vmctx(vmctx, |instance| { + (*instance.store()).component_async_store().async_exit( + callback.cast::(), + post_return.cast::(), + RuntimeComponentInstanceIndex::from_u32(caller_instance), + callee.cast::(), + RuntimeComponentInstanceIndex::from_u32(callee_instance), + param_count, + result_count, + flags, + ) + }) +} + unsafe fn future_transfer( vmctx: NonNull, src_idx: u32, diff --git 
a/tests/misc_testsuite/component-model-async/fused.wast b/tests/misc_testsuite/component-model-async/fused.wast new file mode 100644 index 000000000000..66d6d3081a57 --- /dev/null +++ b/tests/misc_testsuite/component-model-async/fused.wast @@ -0,0 +1,251 @@ +;;! component_model_async = true +;;! reference_types = true +;;! gc_types = true +;;! multi_memory = true + +;; async lower -> async lift without callback +(component + (component $lifter + (core module $m + (import "" "task.return" (func $task-return (param i32))) + (func (export "foo") (param i32) (call $task-return (local.get 0))) + ) + (core type $task-return (func (param i32))) + (core func $task-return (canon task.return $task-return)) + (core instance $i (instantiate $m + (with "" (instance (export "task.return" (func $task-return)))) + )) + + (func (export "foo") (param "p1" u32) (result u32) + (canon lift (core func $i "foo") async) + ) + ) + + (component $lowerer + (import "a" (func $foo (param "p1" u32) (result u32))) + (core module $libc (memory (export "memory") 1)) + (core instance $libc (instantiate $libc)) + (core func $foo (canon lower (func $foo) async (memory $libc "memory"))) + (core module $m + (import "libc" "memory" (memory 1)) + (import "" "foo" (func $foo (param i32 i32) (result i32))) + (func (export "run") + block + (i32.store offset=0 (i32.const 1200) (i32.const 42)) + (call $foo (i32.const 1200) (i32.const 1204)) + (i32.eq (i32.load offset=0 (i32.const 1204)) (i32.const 42)) + br_if 0 + unreachable + end + ) + ) + (core instance $i (instantiate $m + (with "libc" (instance $libc)) + (with "" (instance (export "foo" (func $foo)))) + )) + (func (export "run") (canon lift (core func $i "run"))) + ) + + (instance $lifter (instantiate $lifter)) + (instance $lowerer (instantiate $lowerer (with "a" (func $lifter "foo")))) + (func (export "run") (alias export $lowerer "run")) +) + +;; TODO: this requires async support in `wasmtime-wast`: +;;(assert_return (invoke "run")) + +;; async lower -> async lift with callback +(component + (component $lifter + (core module $m + (import "" "task.return" (func $task-return (param i32))) + (func (export "callback") (param i32 i32 i32 i32) (result i32) unreachable) + (func (export "foo") (param i32) (result i32) + (call $task-return (local.get 0)) + i32.const 0 + ) + ) + (core type $task-return (func (param i32))) + (core func $task-return (canon task.return $task-return)) + (core instance $i (instantiate $m + (with "" (instance (export "task.return" (func $task-return)))) + )) + + (func (export "foo") (param "p1" u32) (result u32) + (canon lift (core func $i "foo") async (callback (func $i "callback"))) + ) + ) + + (component $lowerer + (import "a" (func $foo (param "p1" u32) (result u32))) + (core module $libc (memory (export "memory") 1)) + (core instance $libc (instantiate $libc)) + (core func $foo (canon lower (func $foo) async (memory $libc "memory"))) + (core module $m + (import "libc" "memory" (memory 1)) + (import "" "foo" (func $foo (param i32 i32) (result i32))) + (func (export "run") + block + (i32.store offset=0 (i32.const 1200) (i32.const 42)) + (call $foo (i32.const 1200) (i32.const 1204)) + (i32.eq (i32.load offset=0 (i32.const 1204)) (i32.const 42)) + br_if 0 + unreachable + end + ) + ) + (core instance $i (instantiate $m + (with "libc" (instance $libc)) + (with "" (instance (export "foo" (func $foo)))) + )) + (func (export "run") (canon lift (core func $i "run"))) + ) + + (instance $lifter (instantiate $lifter)) + (instance $lowerer (instantiate $lowerer (with 
"a" (func $lifter "foo")))) + (func (export "run") (alias export $lowerer "run")) +) + +;; TODO: this requires async support in `wasmtime-wast`: +;;(assert_return (invoke "run")) + +;; async lower -> sync lift +(component + (component $lifter + (core module $m + (func (export "foo") (param i32) (result i32) + local.get 0 + ) + ) + (core instance $i (instantiate $m)) + (func (export "foo") (param "p1" u32) (result u32) + (canon lift (core func $i "foo")) + ) + ) + + (component $lowerer + (import "a" (func $foo (param "p1" u32) (result u32))) + (core module $libc (memory (export "memory") 1)) + (core instance $libc (instantiate $libc)) + (core func $foo (canon lower (func $foo) async (memory $libc "memory"))) + (core module $m + (import "libc" "memory" (memory 1)) + (import "" "foo" (func $foo (param i32 i32) (result i32))) + (func (export "run") + block + (i32.store offset=0 (i32.const 1200) (i32.const 42)) + (call $foo (i32.const 1200) (i32.const 1204)) + (i32.eq (i32.load offset=0 (i32.const 1204)) (i32.const 42)) + br_if 0 + unreachable + end + ) + ) + (core instance $i (instantiate $m + (with "libc" (instance $libc)) + (with "" (instance (export "foo" (func $foo)))) + )) + (func (export "run") (canon lift (core func $i "run"))) + ) + + (instance $lifter (instantiate $lifter)) + (instance $lowerer (instantiate $lowerer (with "a" (func $lifter "foo")))) + (func (export "run") (alias export $lowerer "run")) +) + +;; TODO: this requires async support in `wasmtime-wast`: +;;(assert_return (invoke "run")) + +;; sync lower -> async lift without callback +(component + (component $lifter + (core module $m + (import "" "task.return" (func $task-return (param i32))) + (func (export "foo") (param i32) (call $task-return (local.get 0))) + ) + (core type $task-return (func (param i32))) + (core func $task-return (canon task.return $task-return)) + (core instance $i (instantiate $m + (with "" (instance (export "task.return" (func $task-return)))) + )) + + (func (export "foo") (param "p1" u32) (result u32) + (canon lift (core func $i "foo") async) + ) + ) + + (component $lowerer + (import "a" (func $foo (param "p1" u32) (result u32))) + (core func $foo (canon lower (func $foo))) + (core module $m + (import "" "foo" (func $foo (param i32) (result i32))) + (func (export "run") + block + (i32.eq (call $foo (i32.const 42)) (i32.const 42)) + br_if 0 + unreachable + end + ) + ) + (core instance $i (instantiate $m + (with "" (instance (export "foo" (func $foo)))) + )) + (func (export "run") (canon lift (core func $i "run"))) + ) + + (instance $lifter (instantiate $lifter)) + (instance $lowerer (instantiate $lowerer (with "a" (func $lifter "foo")))) + (func (export "run") (alias export $lowerer "run")) +) + +;; TODO: this requires async support in `wasmtime-wast`: +;;(assert_return (invoke "run")) + +;; async lower -> async lift with callback +(component + (component $lifter + (core module $m + (import "" "task.return" (func $task-return (param i32))) + (func (export "callback") (param i32 i32 i32 i32) (result i32) unreachable) + (func (export "foo") (param i32) (result i32) + (call $task-return (local.get 0)) + i32.const 0 + ) + ) + (core type $task-return (func (param i32))) + (core func $task-return (canon task.return $task-return)) + (core instance $i (instantiate $m + (with "" (instance (export "task.return" (func $task-return)))) + )) + + (func (export "foo") (param "p1" u32) (result u32) + (canon lift (core func $i "foo") async (callback (func $i "callback"))) + ) + ) + + (component $lowerer + (import 
"a" (func $foo (param "p1" u32) (result u32))) + (core func $foo (canon lower (func $foo))) + (core module $m + (import "" "foo" (func $foo (param i32) (result i32))) + (func (export "run") + block + (i32.eq (call $foo (i32.const 42)) (i32.const 42)) + br_if 0 + unreachable + end + ) + ) + (core instance $i (instantiate $m + (with "" (instance (export "foo" (func $foo)))) + )) + (func (export "run") (canon lift (core func $i "run"))) + ) + + (instance $lifter (instantiate $lifter)) + (instance $lowerer (instantiate $lowerer (with "a" (func $lifter "foo")))) + (func (export "run") (alias export $lowerer "run")) +) + +;; TODO: this requires async support in `wasmtime-wast`: +;;(assert_return (invoke "run"))