From 3de68a7a07b98e11582b58c4cdec9298c539091b Mon Sep 17 00:00:00 2001
From: Robert Bragg
Date: Wed, 7 May 2025 21:57:51 +0100
Subject: [PATCH 1/2] Prototype support for async native functions

This experiments with enabling support for async NativeFunctions that are only async from the point of view of the host, and appear synchronous from within JavaScript.

Instead of running the async functions as a Promise via enqueue_job, this works by allowing Operations to be executed over multiple VM cycles, so an Operation may start some async work in one step and further steps can poll for completion of that work and finish the Operation.

In particular, this works by allowing Call Operations to return an `OpStatus::Pending` value that indicates that the same Call Operation needs to be executed repeatedly until it returns an `OpStatus::Finished` status. In the case of a `Pending` status, the program counter is reset and anything that was taken off the stack is pushed back so the same Operation can be re-executed.

There is a new `NativeFunction::from_async_as_sync_with_captures()` that lets the host provide a (sync) closure that itself returns / spawns a boxed Future. This is tracked internally as an `Inner::AsyncFn`.

Whenever the function is `__call__`ed (assuming the operation isn't already in a pending / running state), a new Future is spawned via the application's closure and the Operation enters a "pending" state.

While a NativeFunction is pending, each `__call__` will `poll()` the spawned `Future` to see if the `async` function has a result.

This effectively stalls the VM at the same Opcode while still accounting for any cycle budget and periodically yielding to the application's async runtime while waiting for an async Call Operation to finish.

Limitations / Issues
====================

== Busy Loop Polling ==

Even though the implementation does yield back to the application's async runtime when waiting for a NativeFunction to complete, the implementation isn't ideal because it uses a noop task Context + Waker when polling NativeFunction Futures. This effectively relies on the VM polling the future in a busy loop, wasting CPU time.

A better solution could be to implement a shim Waker that would flag some state on the Boa engine Context, and then adapt the Future that's used to yield the VM to the executor so that it only becomes Ready once the async NativeFunction has signalled the waker. I.e. the Waker would act like a bridge/proxy between a spawned async NativeFunction and the Future/Task associated with the VM's async `run_async_with_budget`. This way I think the VM could remain async-runtime agnostic but would be able to actually sleep while waiting for async functions instead of entering a busy yield loop.

== Requires PC rewind and reverting stack state ==

Ideally, operations that may complete over multiple steps would maintain a state machine via private registers, so that it would not be necessary to repeatedly rewind the program counter and re-push values to the stack in order for the operation to be decoded and executed again from the beginning.

== Only adapts Call Operation ==

Currently only the Call Operation handles async NativeFunctions, but there are other Call[XYZ] Operations that could be adapted too.

== Not compatible with composite Operations that `call()` ==

The ability to track pending async functions is implemented in terms of repeatedly executing an Opcode in the VM until it signals that it's not Pending. 
This currently relies on being able to reset and re-execute the Operation (such as reverting program counter and stack changes). There are lots of Operations that make use of JsObject::call() internally and they would currently trigger a panic if they called an async NativeFunction because they would not be able to "resolve()" the "Pending" status that would be returned by the `call()`. Ideally all Operations that use `__call__` or `__construct__` should be fully resumable in the same way that the Call Operation is now. This would presumably be easier to achieve with Rust Coroutines if they were stable because it would otherwise be necessary to adapt composite Operations into a state machine, similar to what the compiler does for an async Future, so they can yield for async function calls and be resumed by the VM. --- core/engine/src/native_function/mod.rs | 419 +++++++++++++++--- .../engine/src/object/internal_methods/mod.rs | 31 ++ core/engine/src/vm/completion_record.rs | 76 +++- core/engine/src/vm/mod.rs | 47 +- core/engine/src/vm/opcode/await/mod.rs | 13 +- core/engine/src/vm/opcode/call/mod.rs | 24 +- .../src/vm/opcode/control_flow/return.rs | 16 +- .../src/vm/opcode/control_flow/throw.rs | 17 +- core/engine/src/vm/opcode/generator/mod.rs | 8 +- .../src/vm/opcode/generator/yield_stm.rs | 8 +- core/engine/src/vm/opcode/mod.rs | 18 +- examples/src/bin/tokio_event_loop.rs | 45 +- 12 files changed, 605 insertions(+), 117 deletions(-) diff --git a/core/engine/src/native_function/mod.rs b/core/engine/src/native_function/mod.rs index f03fe49711c..882f87bdea8 100644 --- a/core/engine/src/native_function/mod.rs +++ b/core/engine/src/native_function/mod.rs @@ -4,8 +4,12 @@ //! from native Rust functions and closures. use std::cell::RefCell; +use std::future::Future; +use std::pin::Pin; +use std::rc::Rc; use boa_gc::{custom_trace, Finalize, Gc, Trace}; +use futures_lite::FutureExt as _; use crate::job::NativeAsyncJob; use crate::value::JsVariant; @@ -40,6 +44,23 @@ pub(crate) use continuation::{CoroutineState, NativeCoroutine}; /// - The last argument is the engine [`Context`]. pub type NativeFunctionPointer = fn(&JsValue, &[JsValue], &mut Context) -> JsResult; +/// The required signature for spawning async native functions. +/// +/// # Arguments +/// +/// - The first argument represents the `this` variable of every ECMAScript function. +/// - The second argument represents the list of all arguments passed to the function. +/// - The last argument is the engine [`Context`]. +/// +/// Returns a boxed future that will be awaited by the engine, stalling the engine +/// on its current operation until the future is resolved. 
+pub type SpawnAsyncFunctionFn = + dyn Fn( + &JsValue, + &[JsValue], + &mut Context, + ) -> JsResult>>>>; + trait TraceableClosure: Trace { fn call(&self, this: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult; } @@ -108,6 +129,148 @@ impl JsData for NativeFunctionObject { } } +enum AsyncCallResult { + Pending, + Ready(JsResult), +} + +enum AsyncRunningState { + None, + Calling, + Constructing { new_target: JsValue }, +} +trait TraceableAsyncFunction: Trace { + fn running(&self) -> AsyncRunningState; + fn call_or_construct( + &self, + this: &JsValue, + args: &[JsValue], + new_target: &JsValue, + context: &mut Context, + ) -> AsyncCallResult; + fn poll(&self, context: &mut Context) -> AsyncCallResult { + self.call_or_construct(&JsValue::undefined(), &[], &JsValue::undefined(), context) + } +} + +#[derive(Finalize)] +enum AsyncCallState { + None, + Calling { + this: JsValue, + args: Vec, + f: Pin>>>, + }, + Constructing { + new_target: JsValue, + args: Vec, + f: Pin>>>, + }, +} +unsafe impl Trace for AsyncCallState { + custom_trace!(this, mark, { + match this { + AsyncCallState::None => {} + AsyncCallState::Calling { this, args, f: _ } => { + mark(this); + mark(args); + } + AsyncCallState::Constructing { + new_target, + args, + f: _, + } => { + mark(new_target); + mark(args); + } + } + }); +} + +#[derive(Finalize)] +struct NativeAsyncFunction { + start: Rc, + captures: T, + state: RefCell, +} +unsafe impl Trace for NativeAsyncFunction { + custom_trace!(this, mark, { + mark(&this.captures); + mark(&*this.state.borrow()); + }); +} + +impl TraceableAsyncFunction for NativeAsyncFunction +where + T: Trace, +{ + fn running(&self) -> AsyncRunningState { + match &*self.state.borrow() { + AsyncCallState::None => AsyncRunningState::None, + AsyncCallState::Calling { .. } => AsyncRunningState::Calling, + AsyncCallState::Constructing { new_target, .. } => AsyncRunningState::Constructing { + new_target: new_target.clone(), + }, + } + } + + fn call_or_construct( + &self, + this: &JsValue, + args: &[JsValue], + new_target: &JsValue, + context: &mut Context, + ) -> AsyncCallResult { + let mut state = self.state.borrow_mut(); + match *state { + AsyncCallState::None => { + if new_target.is_undefined() { + let f = (*self.start)(this, args, context); + let f = match f { + Ok(f) => f, + Err(e) => return AsyncCallResult::Ready(Err(e)), + }; + *state = AsyncCallState::Calling { + this: this.clone(), + args: args.to_vec(), + f, + }; + } else { + let f = (*self.start)(new_target, args, context); + let f = match f { + Ok(f) => f, + Err(e) => return AsyncCallResult::Ready(Err(e)), + }; + *state = AsyncCallState::Constructing { + new_target: new_target.clone(), + args: args.to_vec(), + f, + }; + } + + AsyncCallResult::Pending + } + AsyncCallState::Calling { ref mut f, .. } + | AsyncCallState::Constructing { ref mut f, .. } => { + // FIXME: figure out how to work with an async Context / Waker from the application (e.g. from Tokio) + let waker = std::task::Waker::noop(); + let mut context = std::task::Context::from_waker(waker); + let result = f.poll(&mut context); + match result { + std::task::Poll::Pending => { + //println!("Pending"); + AsyncCallResult::Pending + } + std::task::Poll::Ready(result) => { + *state = AsyncCallState::None; + AsyncCallResult::Ready(result) + } + } + } + } + } +} + /// A callable Rust function that can be invoked by the engine. 
/// /// `NativeFunction` functions are divided in two: @@ -129,6 +292,7 @@ pub struct NativeFunction { #[derive(Clone)] enum Inner { PointerFn(NativeFunctionPointer), + AsyncFn(Gc), Closure(Gc), } @@ -308,6 +472,40 @@ impl NativeFunction { }) } + /// Creates a `NativeFunction` from a function returning a [`Future`] and a list of traceable + /// captures. + /// + /// When called from JavaScript, the function will appear to be synchronous, but it will be + /// able to yield the VM's control to the applications event loop, enabling the native function to + /// perform I/O operations or other tasks that may take a long time to complete without blocking. + /// + /// This also ensures that scripts that make long-running calls to native functions can be + /// cooperatively scheduled within a single threaded environment (e.g. a browser) and can be + /// stopped if they take too long to complete. + pub fn from_async_as_sync_with_captures(f: F, captures: T) -> Self + where + F: Fn( + &JsValue, + &[JsValue], + &mut Context, + ) -> JsResult>>>> + + 'static, + T: Trace + 'static, + { + let ptr = Gc::into_raw(Gc::new(NativeAsyncFunction { + start: Rc::new(f), + captures, + state: RefCell::new(AsyncCallState::None), + })); + + // SAFETY: The pointer returned by `into_raw` is only used to coerce to a trait object, + unsafe { + Self { + inner: Inner::AsyncFn(Gc::from_raw(ptr)), + } + } + } + /// Creates a `NativeFunction` from a `Copy` closure. pub fn from_copy_closure(closure: F) -> Self where @@ -377,7 +575,11 @@ impl NativeFunction { } /// Calls this `NativeFunction`, forwarding the arguments to the corresponding function. - #[inline] + /// + /// This is mainly for compatibility with rustdoc tests that expect to be able to create + /// various types of native functions and call them. + /// + /// Panics if the function has an async implementation pub fn call( &self, this: &JsValue, @@ -387,6 +589,114 @@ impl NativeFunction { match self.inner { Inner::PointerFn(f) => f(this, args, context), Inner::Closure(ref c) => c.call(this, args, context), + Inner::AsyncFn(_) => unreachable!("cannot call async function synchronously"), + } + } + + /// Calls this `NativeFunction`, forwarding the arguments to the corresponding function. + /// + /// XXX: Does it make sense to allow constructors to be called if the `NativeFunction` + /// for a constructor expects a `new_target` argument which will be `undefined`? + /// + /// The ECMA spec has a more general `BuiltinCallOrConstruct` operation that differentiates + /// `this` and `newTarget` and maybe a `NativeFunction` should implement that? 
+ #[inline] + fn try_call( + &self, + is_constructor: bool, + argument_count: usize, + context: &mut Context, + ) -> AsyncCallResult { + //println!("[NativeFunction] call, arg_count: {}", argument_count); + if let Inner::AsyncFn(ref f) = self.inner { + match f.running() { + AsyncRunningState::None => { + let args = context + .vm + .stack + .calling_convention_pop_arguments(argument_count); + let func = context.vm.stack.pop(); + let this = context.vm.stack.pop(); + let this_ref = if is_constructor { + &JsValue::undefined() + } else { + &this + }; + let result = + f.call_or_construct(this_ref, &args, &JsValue::undefined(), context); + if matches!(result, AsyncCallResult::Pending) { + context.vm.stack.push(this); + context.vm.stack.push(func); + context.vm.stack.calling_convention_push_arguments(&args); + } + result + } + AsyncRunningState::Calling => f.poll(context), + AsyncRunningState::Constructing { new_target: _ } => { + unreachable!() + } + } + } else { + let args = context + .vm + .stack + .calling_convention_pop_arguments(argument_count); + let _func = context.vm.stack.pop(); + let this = context.vm.stack.pop(); + let this = if is_constructor { + JsValue::undefined() + } else { + this + }; + AsyncCallResult::Ready(match self.inner { + Inner::PointerFn(f) => f(&this, &args, context), + Inner::Closure(ref c) => c.call(&this, &args, context), + Inner::AsyncFn(_) => unreachable!(), + }) + } + } + + fn try_construct( + &self, + argument_count: usize, + context: &mut Context, + ) -> (JsValue, AsyncCallResult) { + if let Inner::AsyncFn(ref f) = self.inner { + match f.running() { + AsyncRunningState::None => { + let new_target = context.vm.stack.pop(); + let args = context + .vm + .stack + .calling_convention_pop_arguments(argument_count); + let _func = context.vm.stack.pop(); + let _this = context.vm.stack.pop(); + ( + new_target.clone(), + f.call_or_construct(&JsValue::undefined(), &args, &new_target, context), + ) + } + AsyncRunningState::Constructing { new_target } => { + (new_target.clone(), f.poll(context)) + } + AsyncRunningState::Calling => { + unreachable!() + } + } + } else { + let new_target = context.vm.stack.pop(); + let args = context + .vm + .stack + .calling_convention_pop_arguments(argument_count); + let _func = context.vm.stack.pop(); + let _this = context.vm.stack.pop(); + let result = AsyncCallResult::Ready(match self.inner { + Inner::PointerFn(f) => f(&new_target, &args, context), + Inner::Closure(ref c) => c.call(&new_target, &args, context), + Inner::AsyncFn(_) => unreachable!(), + }); + (new_target.clone(), result) } } @@ -410,13 +720,7 @@ pub(crate) fn native_function_call( argument_count: usize, context: &mut Context, ) -> JsResult { - let args = context - .vm - .stack - .calling_convention_pop_arguments(argument_count); - let _func = context.vm.stack.pop(); - let this = context.vm.stack.pop(); - + //println!("native_function_call"); // We technically don't need this since native functions don't push any new frames to the // vm, but we'll eventually have to combine the native stack with the vm stack. 
context.check_runtime_limits()?; @@ -436,19 +740,23 @@ pub(crate) fn native_function_call( context.swap_realm(&mut realm); context.vm.native_active_function = Some(this_function_object); - let result = if constructor.is_some() { - function.call(&JsValue::undefined(), &args, context) - } else { - function.call(&this, &args, context) - } - .map_err(|err| err.inject_realm(context.realm().clone())); + let result = function.try_call(constructor.is_some(), argument_count, context); + + let result = match result { + AsyncCallResult::Pending => Ok(CallValue::AsyncPending), + AsyncCallResult::Ready(result) => { + context + .vm + .stack + .push(result.map_err(|err| err.inject_realm(context.realm().clone()))?); + Ok(CallValue::Complete) + } + }; context.vm.native_active_function = None; context.swap_realm(&mut realm); - context.vm.stack.push(result?); - - Ok(CallValue::Complete) + result } /// Construct an instance of this object with the specified arguments. @@ -462,6 +770,7 @@ fn native_function_construct( argument_count: usize, context: &mut Context, ) -> JsResult { + //println!("native_function_construct"); // We technically don't need this since native functions don't push any new frames to the // vm, but we'll eventually have to combine the native stack with the vm stack. context.check_runtime_limits()?; @@ -481,43 +790,49 @@ fn native_function_construct( context.swap_realm(&mut realm); context.vm.native_active_function = Some(this_function_object); - let new_target = context.vm.stack.pop(); - let args = context - .vm - .stack - .calling_convention_pop_arguments(argument_count); - let _func = context.vm.stack.pop(); - let _this = context.vm.stack.pop(); - - let result = function - .call(&new_target, &args, context) - .map_err(|err| err.inject_realm(context.realm().clone())) - .and_then(|v| match v.variant() { - JsVariant::Object(o) => Ok(o.clone()), - val => { - if constructor.expect("must be a constructor").is_base() || val.is_undefined() { - let prototype = get_prototype_from_constructor( - &new_target, - StandardConstructors::object, - context, - )?; - Ok(JsObject::from_proto_and_data_with_shared_shape( - context.root_shape(), - prototype, - OrdinaryObject, - )) - } else { - Err(JsNativeError::typ() - .with_message("derived constructor can only return an Object or undefined") - .into()) - } - } - }); + let (new_target, result) = function.try_construct(argument_count, context); + + let result = match result { + AsyncCallResult::Pending => None, + AsyncCallResult::Ready(result) => Some( + result + .map_err(|err| err.inject_realm(context.realm().clone())) + .and_then(|v| match v.variant() { + JsVariant::Object(o) => Ok(o.clone()), + val => { + if constructor.expect("must be a constructor").is_base() + || val.is_undefined() + { + let prototype = get_prototype_from_constructor( + &new_target, + StandardConstructors::object, + context, + )?; + Ok(JsObject::from_proto_and_data_with_shared_shape( + context.root_shape(), + prototype, + OrdinaryObject, + )) + } else { + Err(JsNativeError::typ() + .with_message( + "derived constructor can only return an Object or undefined", + ) + .into()) + } + } + }), + ), + }; context.vm.native_active_function = None; context.swap_realm(&mut realm); - context.vm.stack.push(result?); - - Ok(CallValue::Complete) + match result { + None => Ok(CallValue::AsyncPending), + Some(result) => { + context.vm.stack.push(result?); + Ok(CallValue::Complete) + } + } } diff --git a/core/engine/src/object/internal_methods/mod.rs b/core/engine/src/object/internal_methods/mod.rs index 
87437169a07..647d3d28a0c 100644 --- a/core/engine/src/object/internal_methods/mod.rs +++ b/core/engine/src/object/internal_methods/mod.rs @@ -393,10 +393,23 @@ pub(crate) enum CallValue { argument_count: usize, }, + /// Further processing is needed. + /// + /// Unlike for `Pending`, the further processing should not block the VM and + /// be completed synchronously, it should integrate with VM cycle budgeting + /// and yielding. + AsyncPending, + /// The value has been computed and is the first element on the stack. Complete, } +pub(crate) enum ResolvedCallValue { + Ready, + Pending, + Complete, +} + impl CallValue { /// Resolves the [`CallValue`], and return if the value is complete. pub(crate) fn resolve(mut self, context: &mut Context) -> JsResult { @@ -412,7 +425,25 @@ impl CallValue { match self { Self::Ready => Ok(false), Self::Complete => Ok(true), + Self::Pending { .. } | Self::AsyncPending { .. } => unreachable!(), + } + } + + pub(crate) fn async_resolve(mut self, context: &mut Context) -> JsResult { + while let Self::Pending { + func, + object, + argument_count, + } = self + { + self = func(&object, argument_count, context)?; + } + + match self { + Self::Ready => Ok(ResolvedCallValue::Ready), + Self::Complete => Ok(ResolvedCallValue::Complete), Self::Pending { .. } => unreachable!(), + Self::AsyncPending { .. } => Ok(ResolvedCallValue::Pending), } } } diff --git a/core/engine/src/vm/completion_record.rs b/core/engine/src/vm/completion_record.rs index c726706db3a..3385c1eb5ad 100644 --- a/core/engine/src/vm/completion_record.rs +++ b/core/engine/src/vm/completion_record.rs @@ -2,6 +2,7 @@ #![allow(clippy::inline_always)] +use super::OpStatus; use crate::{Context, JsError, JsResult, JsValue}; use boa_gc::{custom_trace, Finalize, Trace}; use std::ops::ControlFlow; @@ -51,28 +52,62 @@ impl CompletionRecord { } pub(crate) trait IntoCompletionRecord { - fn into_completion_record(self, context: &mut Context) -> ControlFlow; + fn into_completion_record( + self, + context: &mut Context, + saved_pc: u32, + ) -> ControlFlow; } impl IntoCompletionRecord for () { #[inline(always)] - fn into_completion_record(self, _: &mut Context) -> ControlFlow { - ControlFlow::Continue(()) + fn into_completion_record( + self, + _: &mut Context, + _: u32, + ) -> ControlFlow { + ControlFlow::Continue(OpStatus::Finished) } } impl IntoCompletionRecord for JsError { #[inline(always)] - fn into_completion_record(self, context: &mut Context) -> ControlFlow { + fn into_completion_record( + self, + context: &mut Context, + _: u32, + ) -> ControlFlow { context.handle_error(self) } } impl IntoCompletionRecord for JsResult<()> { #[inline(always)] - fn into_completion_record(self, context: &mut Context) -> ControlFlow { + fn into_completion_record( + self, + context: &mut Context, + _: u32, + ) -> ControlFlow { match self { - Ok(()) => ControlFlow::Continue(()), + Ok(()) => ControlFlow::Continue(OpStatus::Finished), + Err(err) => context.handle_error(err), + } + } +} + +impl IntoCompletionRecord for JsResult { + #[inline(always)] + fn into_completion_record( + self, + context: &mut Context, + saved_pc: u32, + ) -> ControlFlow { + match self { + Ok(OpStatus::Finished) => ControlFlow::Continue(OpStatus::Finished), + Ok(OpStatus::Pending) => { + context.vm.frame_mut().pc = saved_pc; + ControlFlow::Continue(OpStatus::Pending) + } Err(err) => context.handle_error(err), } } @@ -80,7 +115,32 @@ impl IntoCompletionRecord for JsResult<()> { impl IntoCompletionRecord for ControlFlow { #[inline(always)] - fn 
into_completion_record(self, _: &mut Context) -> ControlFlow { - self + fn into_completion_record( + self, + _: &mut Context, + _: u32, + ) -> ControlFlow { + match self { + ControlFlow::Continue(()) => ControlFlow::Continue(OpStatus::Finished), + ControlFlow::Break(completion_record) => ControlFlow::Break(completion_record), + } + } +} + +impl IntoCompletionRecord for ControlFlow { + #[inline(always)] + fn into_completion_record( + self, + context: &mut Context, + saved_pc: u32, + ) -> ControlFlow { + match self { + ControlFlow::Continue(OpStatus::Finished) => ControlFlow::Continue(OpStatus::Finished), + ControlFlow::Continue(OpStatus::Pending) => { + context.vm.frame_mut().pc = saved_pc; + ControlFlow::Continue(OpStatus::Pending) + } + ControlFlow::Break(completion_record) => ControlFlow::Break(completion_record), + } } } diff --git a/core/engine/src/vm/mod.rs b/core/engine/src/vm/mod.rs index 93edde4d2d3..e62c39b35dc 100644 --- a/core/engine/src/vm/mod.rs +++ b/core/engine/src/vm/mod.rs @@ -580,9 +580,9 @@ impl Context { &mut self, f: F, opcode: Opcode, - ) -> ControlFlow + ) -> ControlFlow where - F: FnOnce(&mut Context, Opcode) -> ControlFlow, + F: FnOnce(&mut Context, Opcode) -> ControlFlow, { let frame = self.vm.frame(); let (instruction, _) = frame @@ -633,17 +633,27 @@ impl Context { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum OpStatus { + Finished, + Pending, +} + impl Context { - fn execute_instruction(&mut self, f: F, opcode: Opcode) -> ControlFlow + fn execute_instruction( + &mut self, + f: F, + opcode: Opcode, + ) -> ControlFlow where - F: FnOnce(&mut Context, Opcode) -> ControlFlow, + F: FnOnce(&mut Context, Opcode) -> ControlFlow, { f(self, opcode) } - fn execute_one(&mut self, f: F, opcode: Opcode) -> ControlFlow + fn execute_one(&mut self, f: F, opcode: Opcode) -> ControlFlow where - F: FnOnce(&mut Context, Opcode) -> ControlFlow, + F: FnOnce(&mut Context, Opcode) -> ControlFlow, { #[cfg(feature = "fuzz")] { @@ -666,7 +676,7 @@ impl Context { self.execute_instruction(f, opcode) } - fn handle_error(&mut self, err: JsError) -> ControlFlow { + fn handle_error(&mut self, err: JsError) -> ControlFlow { // If we hit the execution step limit, bubble up the error to the // (Rust) caller instead of trying to handle as an exception. 
if !err.is_catchable() { @@ -695,7 +705,7 @@ impl Context { let pc = self.vm.frame().pc.saturating_sub(1); if self.vm.handle_exception_at(pc) { self.vm.pending_exception = Some(err); - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } // Inject realm before crossing the function boundry @@ -705,7 +715,7 @@ impl Context { self.handle_thow() } - fn handle_return(&mut self) -> ControlFlow { + fn handle_return(&mut self) -> ControlFlow { let exit_early = self.vm.frame().exit_early(); self.vm.stack.truncate_to_frame(&self.vm.frame); @@ -716,10 +726,10 @@ impl Context { self.vm.stack.push(result); self.vm.pop_frame().expect("frame must exist"); - ControlFlow::Continue(()) + ControlFlow::Continue(OpStatus::Finished) } - fn handle_yield(&mut self) -> ControlFlow { + fn handle_yield(&mut self) -> ControlFlow { let result = self.vm.take_return_value(); if self.vm.frame().exit_early() { return ControlFlow::Break(CompletionRecord::Return(result)); @@ -727,10 +737,10 @@ impl Context { self.vm.stack.push(result); self.vm.pop_frame().expect("frame must exist"); - ControlFlow::Continue(()) + ControlFlow::Continue(OpStatus::Finished) } - fn handle_thow(&mut self) -> ControlFlow { + fn handle_thow(&mut self) -> ControlFlow { let mut env_fp = self.vm.frame().env_fp; if self.vm.frame().exit_early() { self.vm.environments.truncate(env_fp as usize); @@ -751,7 +761,7 @@ impl Context { let exit_early = self.vm.frame.exit_early(); if self.vm.handle_exception_at(pc) { - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } if exit_early { @@ -770,7 +780,7 @@ impl Context { } self.vm.environments.truncate(env_fp as usize); self.vm.stack.truncate_to_frame(&frame); - ControlFlow::Continue(()) + ControlFlow::Continue(OpStatus::Finished) } /// Runs the current frame to completion, yielding to the caller each time `budget` @@ -800,7 +810,10 @@ impl Context { }, opcode, ) { - ControlFlow::Continue(()) => {} + ControlFlow::Continue(OpStatus::Finished) => {} + ControlFlow::Continue(OpStatus::Pending) => { + runtime_budget = 0; + } ControlFlow::Break(value) => return value, } @@ -830,7 +843,7 @@ impl Context { let opcode = Opcode::decode(*byte); match self.execute_one(Self::execute_bytecode_instruction, opcode) { - ControlFlow::Continue(()) => {} + ControlFlow::Continue(_) => {} ControlFlow::Break(value) => return value, } } diff --git a/core/engine/src/vm/opcode/await/mod.rs b/core/engine/src/vm/opcode/await/mod.rs index 3cfdcb6ecf6..39929d5b568 100644 --- a/core/engine/src/vm/opcode/await/mod.rs +++ b/core/engine/src/vm/opcode/await/mod.rs @@ -7,7 +7,7 @@ use crate::{ js_string, native_function::NativeFunction, object::FunctionObjectBuilder, - vm::{opcode::Operation, CompletionRecord, GeneratorResumeKind}, + vm::{opcode::Operation, CompletionRecord, GeneratorResumeKind, OpStatus}, Context, JsArgs, JsValue, }; use boa_gc::Gc; @@ -25,7 +25,7 @@ impl Await { pub(super) fn operation( value: VaryingOperand, context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { let value = context.vm.get_register(value.into()); // 2. Let promise be ? PromiseResolve(%Promise%, value). @@ -197,7 +197,10 @@ pub(crate) struct CompletePromiseCapability; impl CompletePromiseCapability { #[inline(always)] - pub(super) fn operation((): (), context: &mut Context) -> ControlFlow { + pub(super) fn operation( + (): (), + context: &mut Context, + ) -> ControlFlow { // If the current executing function is an async function we have to resolve/reject it's promise at the end. 
// The relevant spec section is 3. in [AsyncBlockStart](https://tc39.es/ecma262/#sec-asyncblockstart). let Some(promise_capability) = context.vm.stack.get_promise_capability(&context.vm.frame) @@ -205,7 +208,7 @@ impl CompletePromiseCapability { return if context.vm.pending_exception.is_some() { context.handle_thow() } else { - ControlFlow::Continue(()) + ControlFlow::Continue(OpStatus::Finished) }; }; @@ -226,7 +229,7 @@ impl CompletePromiseCapability { .vm .set_return_value(promise_capability.promise().clone().into()); - ControlFlow::Continue(()) + ControlFlow::Continue(OpStatus::Finished) } } diff --git a/core/engine/src/vm/opcode/call/mod.rs b/core/engine/src/vm/opcode/call/mod.rs index 2e64102988e..2dbcaa2a1bc 100644 --- a/core/engine/src/vm/opcode/call/mod.rs +++ b/core/engine/src/vm/opcode/call/mod.rs @@ -3,8 +3,8 @@ use crate::{ builtins::{promise::PromiseCapability, Promise}, error::JsNativeError, module::{ModuleKind, Referrer}, - object::FunctionObjectBuilder, - vm::opcode::Operation, + object::{internal_methods::ResolvedCallValue, FunctionObjectBuilder}, + vm::opcode::{OpStatus, Operation}, Context, JsObject, JsResult, JsValue, NativeFunction, }; @@ -177,21 +177,33 @@ pub(crate) struct Call; impl Call { #[inline(always)] - pub(super) fn operation(argument_count: VaryingOperand, context: &mut Context) -> JsResult<()> { + pub(super) fn operation( + argument_count: VaryingOperand, + context: &mut Context, + ) -> JsResult { let func = context .vm .stack .calling_convention_get_function(argument_count.into()); + //println!("Call function: {:?}", func); let Some(object) = func.as_object() else { return Err(JsNativeError::typ() .with_message("not a callable function") .into()); }; - object.__call__(argument_count.into()).resolve(context)?; - - Ok(()) + match object + .__call__(argument_count.into()) + .async_resolve(context)? 
+ { + ResolvedCallValue::Ready => Ok(OpStatus::Finished), + ResolvedCallValue::Complete => Ok(OpStatus::Finished), + ResolvedCallValue::Pending => { + //println!("Pending call"); + Ok(OpStatus::Pending) + } + } } } diff --git a/core/engine/src/vm/opcode/control_flow/return.rs b/core/engine/src/vm/opcode/control_flow/return.rs index 54d79fba130..4ddce2207e5 100644 --- a/core/engine/src/vm/opcode/control_flow/return.rs +++ b/core/engine/src/vm/opcode/control_flow/return.rs @@ -3,7 +3,7 @@ use std::ops::ControlFlow; use crate::{ vm::{ opcode::{Operation, VaryingOperand}, - CompletionRecord, + CompletionRecord, OpStatus, }, Context, JsNativeError, }; @@ -17,7 +17,10 @@ pub(crate) struct Return; impl Return { #[inline(always)] - pub(crate) fn operation((): (), context: &mut Context) -> ControlFlow { + pub(crate) fn operation( + (): (), + context: &mut Context, + ) -> ControlFlow { context.handle_return() } } @@ -37,10 +40,13 @@ pub(crate) struct CheckReturn; impl CheckReturn { #[inline(always)] - pub(crate) fn operation((): (), context: &mut Context) -> ControlFlow { + pub(crate) fn operation( + (): (), + context: &mut Context, + ) -> ControlFlow { let frame = context.vm.frame(); if !frame.construct() { - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } let this = &context.vm.stack.get_this(frame); let result = context.vm.take_return_value(); @@ -78,7 +84,7 @@ impl CheckReturn { }; context.vm.set_return_value(result); - ControlFlow::Continue(()) + ControlFlow::Continue(OpStatus::Finished) } } diff --git a/core/engine/src/vm/opcode/control_flow/throw.rs b/core/engine/src/vm/opcode/control_flow/throw.rs index 715e1142882..1099223f864 100644 --- a/core/engine/src/vm/opcode/control_flow/throw.rs +++ b/core/engine/src/vm/opcode/control_flow/throw.rs @@ -3,7 +3,7 @@ use std::ops::ControlFlow; use crate::{ vm::{ opcode::{Operation, VaryingOperand}, - CompletionRecord, + CompletionRecord, OpStatus, }, Context, JsError, JsNativeError, }; @@ -20,7 +20,7 @@ impl Throw { pub(crate) fn operation( value: VaryingOperand, context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { let value = context.vm.get_register(value.into()); let error = JsError::from_opaque(value.clone()); context.vm.pending_exception = Some(error); @@ -28,7 +28,7 @@ impl Throw { // Note: -1 because we increment after fetching the opcode. let pc = context.vm.frame().pc - 1; if context.vm.handle_exception_at(pc) { - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } context.handle_thow() @@ -50,11 +50,14 @@ pub(crate) struct ReThrow; impl ReThrow { #[inline(always)] - pub(crate) fn operation((): (), context: &mut Context) -> ControlFlow { + pub(crate) fn operation( + (): (), + context: &mut Context, + ) -> ControlFlow { // Note: -1 because we increment after fetching the opcode. 
let pc = context.vm.frame().pc.saturating_sub(1); if context.vm.handle_exception_at(pc) { - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } // Note: If we are rethowing and there is no pending error, @@ -88,11 +91,11 @@ impl Exception { pub(crate) fn operation( dst: VaryingOperand, context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { if let Some(error) = context.vm.pending_exception.take() { let error = error.to_opaque(context); context.vm.set_register(dst.into(), error); - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } // If there is no pending error, this means that `return()` was called diff --git a/core/engine/src/vm/opcode/generator/mod.rs b/core/engine/src/vm/opcode/generator/mod.rs index 1be7376c247..2278f93baa3 100644 --- a/core/engine/src/vm/opcode/generator/mod.rs +++ b/core/engine/src/vm/opcode/generator/mod.rs @@ -11,7 +11,7 @@ use crate::{ vm::{ call_frame::GeneratorResumeKind, opcode::{Operation, ReThrow}, - CompletionRecord, + CompletionRecord, OpStatus, }, Context, JsError, JsObject, JsResult, }; @@ -31,7 +31,7 @@ impl Generator { pub(super) fn operation( r#async: VaryingOperand, context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { let r#async = u32::from(r#async) != 0; let active_function = context.vm.stack.get_function(context.vm.frame()); @@ -171,13 +171,13 @@ impl GeneratorNext { pub(super) fn operation( (resume_kind, value): (VaryingOperand, VaryingOperand), context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { let resume_kind = context .vm .get_register(resume_kind.into()) .to_generator_resume_kind(); match resume_kind { - GeneratorResumeKind::Normal => ControlFlow::Continue(()), + GeneratorResumeKind::Normal => ControlFlow::Continue(OpStatus::Finished), GeneratorResumeKind::Throw => context.handle_error(JsError::from_opaque( context.vm.get_register(value.into()).clone(), )), diff --git a/core/engine/src/vm/opcode/generator/yield_stm.rs b/core/engine/src/vm/opcode/generator/yield_stm.rs index d126bb4e74a..6449f82924c 100644 --- a/core/engine/src/vm/opcode/generator/yield_stm.rs +++ b/core/engine/src/vm/opcode/generator/yield_stm.rs @@ -4,7 +4,7 @@ use crate::{ builtins::async_generator::{AsyncGenerator, AsyncGeneratorState}, vm::{ opcode::{Operation, VaryingOperand}, - CompletionRecord, GeneratorResumeKind, + CompletionRecord, GeneratorResumeKind, OpStatus, }, Context, JsValue, }; @@ -21,7 +21,7 @@ impl GeneratorYield { pub(crate) fn operation( value: VaryingOperand, context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { let value = context.vm.get_register(value.into()); context.vm.set_return_value(value.clone()); context.handle_yield() @@ -46,7 +46,7 @@ impl AsyncGeneratorYield { pub(crate) fn operation( value: VaryingOperand, context: &mut Context, - ) -> ControlFlow { + ) -> ControlFlow { // AsyncGeneratorYield ( value ) // https://tc39.es/ecma262/#sec-asyncgeneratoryield @@ -100,7 +100,7 @@ impl AsyncGeneratorYield { context.vm.stack.push(resume_kind); // d. Return ? AsyncGeneratorUnwrapYieldResumption(resumptionValue). - return ControlFlow::Continue(()); + return ControlFlow::Continue(OpStatus::Finished); } // 12. 
Else, diff --git a/core/engine/src/vm/opcode/mod.rs b/core/engine/src/vm/opcode/mod.rs index f3c944da404..665ed42e2ec 100644 --- a/core/engine/src/vm/opcode/mod.rs +++ b/core/engine/src/vm/opcode/mod.rs @@ -92,6 +92,8 @@ pub(crate) use unary_ops::*; #[doc(inline)] pub(crate) use value::*; +use super::OpStatus; + /// Specific opcodes for bindings. /// /// This separate enum exists to make matching exhaustive where needed. @@ -349,7 +351,7 @@ macro_rules! generate_opcodes { )* } - type OpcodeHandler = fn(&mut Context, usize) -> ControlFlow; + type OpcodeHandler = fn(&mut Context, usize) -> ControlFlow; const OPCODE_HANDLERS: [OpcodeHandler; 256] = { [ @@ -359,7 +361,7 @@ macro_rules! generate_opcodes { ] }; - type OpcodeHandlerBudget = fn(&mut Context, usize, &mut u32) -> ControlFlow; + type OpcodeHandlerBudget = fn(&mut Context, usize, &mut u32) -> ControlFlow; const OPCODE_HANDLERS_BUDGET: [OpcodeHandlerBudget; 256] = { [ @@ -373,12 +375,12 @@ macro_rules! generate_opcodes { paste::paste! { #[inline(always)] #[allow(unused_parens)] - fn [](context: &mut Context, pc: usize) -> ControlFlow { + fn [](context: &mut Context, pc: usize) -> ControlFlow { let bytes = &context.vm.frame.code_block.bytecode.bytecode; let (args, next_pc) = <($($($FieldType),*)?)>::decode(bytes, pc + 1); context.vm.frame_mut().pc = next_pc as u32; let result = $Variant::operation(args, context); - IntoCompletionRecord::into_completion_record(result, context) + IntoCompletionRecord::into_completion_record(result, context, pc as u32) } } )* @@ -387,13 +389,13 @@ macro_rules! generate_opcodes { paste::paste! { #[inline(always)] #[allow(unused_parens)] - fn [](context: &mut Context, pc: usize, budget: &mut u32) -> ControlFlow { + fn [](context: &mut Context, pc: usize, budget: &mut u32) -> ControlFlow { *budget = budget.saturating_sub(u32::from($Variant::COST)); let bytes = &context.vm.frame.code_block.bytecode.bytecode; let (args, next_pc) = <($($($FieldType),*)?)>::decode(bytes, pc + 1); context.vm.frame_mut().pc = next_pc as u32; let result = $Variant::operation(args, context); - IntoCompletionRecord::into_completion_record(result, context) + IntoCompletionRecord::into_completion_record(result, context, pc as u32) } } )* @@ -456,7 +458,7 @@ impl Context { pub(crate) fn execute_bytecode_instruction( &mut self, opcode: Opcode, - ) -> ControlFlow { + ) -> ControlFlow { let frame = self.vm.frame_mut(); let pc = frame.pc as usize; @@ -467,7 +469,7 @@ impl Context { &mut self, budget: &mut u32, opcode: Opcode, - ) -> ControlFlow { + ) -> ControlFlow { let frame = self.vm.frame_mut(); let pc = frame.pc as usize; diff --git a/examples/src/bin/tokio_event_loop.rs b/examples/src/bin/tokio_event_loop.rs index b2115afc62e..dc33dda1f87 100644 --- a/examples/src/bin/tokio_event_loop.rs +++ b/examples/src/bin/tokio_event_loop.rs @@ -28,7 +28,7 @@ use tokio::{task, time}; fn main() -> JsResult<()> { // An internally async event loop. This event loop blocks the execution of the thread // while executing tasks, but internally uses async to run its tasks. - internally_async_event_loop()?; + //internally_async_event_loop()?; // An externally async event loop. This event loop can yield to the runtime to concurrently // run tasks with it. 
@@ -213,6 +213,17 @@ fn interval(this: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult Ok(JsValue::undefined()) } +async fn host_async_fn(n: usize) -> JsResult { + println!("Host async function called"); + + for i in 0..n { + println!("Host async function iteration {i}"); + time::sleep(Duration::from_millis(500)).await; + } + println!("Host async function finished"); + Ok(JsValue::undefined()) +} + /// Adds the custom runtime to the context. fn add_runtime(context: &mut Context) { // First add the `console` object, to be able to call `console.log()`. @@ -238,6 +249,26 @@ fn add_runtime(context: &mut Context) { NativeFunction::from_fn_ptr(interval), ) .expect("the delay builtin shouldn't exist"); + + context + .register_global_builtin_callable( + js_string!("host_async"), + 1, + NativeFunction::from_async_as_sync_with_captures( + |_this, args, context| { + let Some(arg) = args.first() else { + return Err(JsNativeError::typ() + .with_message("arg must be a callable") + .into()); + }; + let arg = arg.to_u32(context)?; + + Ok(Box::pin(host_async_fn(arg as usize))) + }, + (), + ), + ) + .expect("the foo builtin shouldn't exist"); } // Script that does multiple calls to multiple async timers. @@ -246,6 +277,9 @@ const SCRIPT: &str = r" console.log(`Finished delay. Elapsed time: ${elapsed * 1000} ms`); } + console.log(`======= host_async(5) =======`); + host_async(5); + delay(1000).then(print); delay(500).then(print); delay(200).then(print); @@ -259,6 +293,12 @@ const SCRIPT: &str = r" } interval(counter, 100); + console.log(`====================`); + console.log(`Started interval job`); + console.log(`====================`); + + console.log(`======= host_async(2) =======`); + host_async(2); for(let i = 0; i <= 100000; i++) { // Emulate a long-running evaluation of a script. @@ -268,6 +308,7 @@ const SCRIPT: &str = r" // This flavor is most recommended when you have an application that: // - Needs to wait until the engine finishes executing; depends on the execution result to continue. // - Delegates the execution of the application to the engine's event loop. +#[allow(unused)] fn internally_async_event_loop() -> JsResult<()> { println!("====== Internally async event loop. ======"); @@ -337,7 +378,9 @@ async fn externally_async_event_loop() -> JsResult<()> { script.evaluate_async(context).await.unwrap(); // Run the jobs asynchronously, which avoids blocking the main thread. + println!("==============="); println!("Running jobs..."); + println!("==============="); context.run_jobs_async().await }); From b97ab83a51b50e574b488f160b9b1009c49a479c Mon Sep 17 00:00:00 2001 From: Robert Bragg Date: Thu, 26 Jun 2025 02:08:44 +0100 Subject: [PATCH 2/2] .await for async native functions outside of the VM This avoids manually polling boxed futures within the __call__ implementation for NativeAsyncFunctions. This instead stashes the boxed future under `Context::async_call` so that we can `.await` for it's completion within `Context::run_async_with_budget`. This approach should integrate much better with async runtimes like Tokio, since it doesn't involve manually polling with a task::Context + Waker that aren't managed by the current async runtime. This also means the thread can block waiting for async IO without polling for native function completion in a busy loop. This still needs further iteration, but hopefully serves as a usable draft / proof of concept. 
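For reference, here is what the host-facing API added by these patches looks like in use. This is a minimal sketch that closely mirrors the `host_async` registration added to examples/src/bin/tokio_event_loop.rs in the first patch; the `host_sleep` helper, its argument handling, and the sleep duration are illustrative and assume the API keeps the signature shown in this patch:

    use std::time::Duration;

    use boa_engine::{js_string, Context, JsNativeError, JsResult, JsValue, NativeFunction};

    // Illustrative host-side async function: sleeps on the Tokio timer and
    // returns `undefined` to the script once the sleep completes.
    async fn host_sleep(ms: u64) -> JsResult<JsValue> {
        tokio::time::sleep(Duration::from_millis(ms)).await;
        Ok(JsValue::undefined())
    }

    fn register(context: &mut Context) {
        context
            .register_global_builtin_callable(
                js_string!("host_sleep"),
                1,
                NativeFunction::from_async_as_sync_with_captures(
                    |_this, args, context| {
                        // The closure itself runs synchronously and returns a boxed
                        // future; the VM stalls the current Call operation until the
                        // future resolves, while still yielding to the host runtime.
                        let Some(arg) = args.first() else {
                            return Err(JsNativeError::typ()
                                .with_message("expected one numeric argument")
                                .into());
                        };
                        let ms = arg.to_u32(context)?;
                        Ok(Box::pin(host_sleep(u64::from(ms))))
                    },
                    (),
                ),
            )
            .expect("the host_sleep builtin shouldn't already exist");
    }

From the script's point of view, `host_sleep(250)` then looks like an ordinary blocking call, even though the host runtime keeps making progress while it waits.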
--- core/engine/src/context/mod.rs | 13 ++ core/engine/src/native_function/mod.rs | 232 ++++++++----------------- core/engine/src/vm/mod.rs | 12 +- core/engine/src/vm/opcode/call/mod.rs | 3 +- 4 files changed, 101 insertions(+), 159 deletions(-) diff --git a/core/engine/src/context/mod.rs b/core/engine/src/context/mod.rs index f47bec1606f..d6aa77d35d3 100644 --- a/core/engine/src/context/mod.rs +++ b/core/engine/src/context/mod.rs @@ -14,6 +14,7 @@ use intrinsics::Intrinsics; use temporal_rs::tzdb::FsTzdbProvider; use crate::job::Job; +use crate::native_function::AsyncCallState; use crate::vm::RuntimeLimits; use crate::{ builtins, @@ -129,6 +130,17 @@ pub struct Context { parser_identifier: u32, data: HostDefined, + + /// State of any boxed future that should be .awaited before continuing + /// to execute VM instructions. + /// + /// XXX: How do we make sure that any + /// `AsyncCallState::Finished(Result)` can't be garbage collected? + /// + /// XXX: there's maybe a better place for this, or better abstraction / + /// generalization than this, but this hopefully works for a draft / + /// proof-of-concept. + pub(crate) async_call: AsyncCallState, } impl std::fmt::Debug for Context { @@ -1131,6 +1143,7 @@ impl ContextBuilder { parser_identifier: 0, can_block: self.can_block, data: HostDefined::default(), + async_call: AsyncCallState::None, }; builtins::set_default_global_bindings(&mut context)?; diff --git a/core/engine/src/native_function/mod.rs b/core/engine/src/native_function/mod.rs index 882f87bdea8..22a65a9aa3a 100644 --- a/core/engine/src/native_function/mod.rs +++ b/core/engine/src/native_function/mod.rs @@ -9,7 +9,6 @@ use std::pin::Pin; use std::rc::Rc; use boa_gc::{custom_trace, Finalize, Gc, Trace}; -use futures_lite::FutureExt as _; use crate::job::NativeAsyncJob; use crate::value::JsVariant; @@ -134,13 +133,7 @@ enum AsyncCallResult { Ready(JsResult), } -enum AsyncRunningState { - None, - Calling, - Constructing { new_target: JsValue }, -} trait TraceableAsyncFunction: Trace { - fn running(&self) -> AsyncRunningState; fn call_or_construct( &self, this: &JsValue, @@ -148,41 +141,24 @@ trait TraceableAsyncFunction: Trace { new_target: &JsValue, context: &mut Context, ) -> AsyncCallResult; - fn poll(&self, context: &mut Context) -> AsyncCallResult { - self.call_or_construct(&JsValue::undefined(), &[], &JsValue::undefined(), context) - } } -#[derive(Finalize)] -enum AsyncCallState { +#[derive(Default, Finalize)] +pub(crate) enum AsyncCallState { + #[default] None, Calling { - this: JsValue, - args: Vec, - f: Pin>>>, - }, - Constructing { - new_target: JsValue, - args: Vec, f: Pin>>>, }, + Finished(JsResult), } unsafe impl Trace for AsyncCallState { custom_trace!(this, mark, { match this { - AsyncCallState::None => {} - AsyncCallState::Calling { this, args, f: _ } => { - mark(this); - mark(args); - } - AsyncCallState::Constructing { - new_target, - args, - f: _, - } => { - mark(new_target); - mark(args); - } + AsyncCallState::None + | AsyncCallState::Calling { .. 
} + | AsyncCallState::Finished(Err(_)) => {} + AsyncCallState::Finished(Ok(value)) => mark(value), } }); } @@ -191,12 +167,10 @@ unsafe impl Trace for AsyncCallState { struct NativeAsyncFunction { start: Rc, captures: T, - state: RefCell, } unsafe impl Trace for NativeAsyncFunction { custom_trace!(this, mark, { mark(&this.captures); - mark(&*this.state.borrow()); }); } @@ -204,16 +178,6 @@ impl TraceableAsyncFunction for NativeAsyncFunction where T: Trace, { - fn running(&self) -> AsyncRunningState { - match &*self.state.borrow() { - AsyncCallState::None => AsyncRunningState::None, - AsyncCallState::Calling { .. } => AsyncRunningState::Calling, - AsyncCallState::Constructing { new_target, .. } => AsyncRunningState::Constructing { - new_target: new_target.clone(), - }, - } - } - fn call_or_construct( &self, this: &JsValue, @@ -221,53 +185,18 @@ where new_target: &JsValue, context: &mut Context, ) -> AsyncCallResult { - let mut state = self.state.borrow_mut(); - match *state { - AsyncCallState::None => { - if new_target.is_undefined() { - let f = (*self.start)(this, args, context); - let f = match f { - Ok(f) => f, - Err(e) => return AsyncCallResult::Ready(Err(e)), - }; - *state = AsyncCallState::Calling { - this: this.clone(), - args: args.to_vec(), - f, - }; - } else { - let f = (*self.start)(new_target, args, context); - let f = match f { - Ok(f) => f, - Err(e) => return AsyncCallResult::Ready(Err(e)), - }; - *state = AsyncCallState::Constructing { - new_target: new_target.clone(), - args: args.to_vec(), - f, - }; - } + let f = if new_target.is_undefined() { + (*self.start)(this, args, context) + } else { + (*self.start)(new_target, args, context) + }; + let f = match f { + Ok(f) => f, + Err(e) => return AsyncCallResult::Ready(Err(e)), + }; + context.async_call = AsyncCallState::Calling { f }; - AsyncCallResult::Pending - } - AsyncCallState::Calling { ref mut f, .. } - | AsyncCallState::Constructing { ref mut f, .. } => { - // FIXME: figure out how to work with an async Context / Waker from the application (e.g. 
from Tokio) - let waker = std::task::Waker::noop(); - let mut context = std::task::Context::from_waker(waker); - let result = f.poll(&mut context); - match result { - std::task::Poll::Pending => { - //println!("Pending"); - AsyncCallResult::Pending - } - std::task::Poll::Ready(result) => { - *state = AsyncCallState::None; - AsyncCallResult::Ready(result) - } - } - } - } + AsyncCallResult::Pending } } @@ -495,7 +424,6 @@ impl NativeFunction { let ptr = Gc::into_raw(Gc::new(NativeAsyncFunction { start: Rc::new(f), captures, - state: RefCell::new(AsyncCallState::None), })); // SAFETY: The pointer returned by `into_raw` is only used to coerce to a trait object, @@ -608,46 +536,39 @@ impl NativeFunction { context: &mut Context, ) -> AsyncCallResult { //println!("[NativeFunction] call, arg_count: {}", argument_count); + let args = context + .vm + .stack + .calling_convention_pop_arguments(argument_count); + let func = context.vm.stack.pop(); + let this = context.vm.stack.pop(); + let this_ref = if is_constructor { + &JsValue::undefined() + } else { + &this + }; + if let Inner::AsyncFn(ref f) = self.inner { - match f.running() { - AsyncRunningState::None => { - let args = context - .vm - .stack - .calling_convention_pop_arguments(argument_count); - let func = context.vm.stack.pop(); - let this = context.vm.stack.pop(); - let this_ref = if is_constructor { - &JsValue::undefined() - } else { - &this - }; - let result = - f.call_or_construct(this_ref, &args, &JsValue::undefined(), context); - if matches!(result, AsyncCallResult::Pending) { - context.vm.stack.push(this); - context.vm.stack.push(func); - context.vm.stack.calling_convention_push_arguments(&args); - } - result + let result = match &context.async_call { + AsyncCallState::None => { + f.call_or_construct(this_ref, &args, &JsValue::undefined(), context) } - AsyncRunningState::Calling => f.poll(context), - AsyncRunningState::Constructing { new_target: _ } => { - unreachable!() + AsyncCallState::Calling { .. 
} => AsyncCallResult::Pending, + AsyncCallState::Finished(_) => { + let AsyncCallState::Finished(result) = std::mem::take(&mut context.async_call) + else { + unreachable!() + }; + AsyncCallResult::Ready(result) } + }; + if matches!(result, AsyncCallResult::Pending) { + context.vm.stack.push(this); + context.vm.stack.push(func); + context.vm.stack.calling_convention_push_arguments(&args); } + result } else { - let args = context - .vm - .stack - .calling_convention_pop_arguments(argument_count); - let _func = context.vm.stack.pop(); - let this = context.vm.stack.pop(); - let this = if is_constructor { - JsValue::undefined() - } else { - this - }; AsyncCallResult::Ready(match self.inner { Inner::PointerFn(f) => f(&this, &args, context), Inner::Closure(ref c) => c.call(&this, &args, context), @@ -661,43 +582,42 @@ impl NativeFunction { argument_count: usize, context: &mut Context, ) -> (JsValue, AsyncCallResult) { - if let Inner::AsyncFn(ref f) = self.inner { - match f.running() { - AsyncRunningState::None => { - let new_target = context.vm.stack.pop(); - let args = context - .vm - .stack - .calling_convention_pop_arguments(argument_count); - let _func = context.vm.stack.pop(); - let _this = context.vm.stack.pop(); - ( - new_target.clone(), - f.call_or_construct(&JsValue::undefined(), &args, &new_target, context), - ) - } - AsyncRunningState::Constructing { new_target } => { - (new_target.clone(), f.poll(context)) + let new_target = context.vm.stack.pop(); + let args = context + .vm + .stack + .calling_convention_pop_arguments(argument_count); + let func = context.vm.stack.pop(); + let this = context.vm.stack.pop(); + let result = if let Inner::AsyncFn(ref f) = self.inner { + let result = match &context.async_call { + AsyncCallState::None => { + f.call_or_construct(&JsValue::undefined(), &args, &new_target, context) } - AsyncRunningState::Calling => { - unreachable!() + AsyncCallState::Calling { .. } => AsyncCallResult::Pending, + AsyncCallState::Finished(_) => { + let AsyncCallState::Finished(result) = std::mem::take(&mut context.async_call) + else { + unreachable!() + }; + AsyncCallResult::Ready(result) } + }; + if matches!(result, AsyncCallResult::Pending) { + context.vm.stack.push(this); + context.vm.stack.push(func); + context.vm.stack.calling_convention_push_arguments(&args); + context.vm.stack.push(new_target.clone()); } + result } else { - let new_target = context.vm.stack.pop(); - let args = context - .vm - .stack - .calling_convention_pop_arguments(argument_count); - let _func = context.vm.stack.pop(); - let _this = context.vm.stack.pop(); - let result = AsyncCallResult::Ready(match self.inner { + AsyncCallResult::Ready(match self.inner { Inner::PointerFn(f) => f(&new_target, &args, context), Inner::Closure(ref c) => c.call(&new_target, &args, context), Inner::AsyncFn(_) => unreachable!(), - }); - (new_target.clone(), result) - } + }) + }; + (new_target.clone(), result) } /// Converts this `NativeFunction` into a `JsFunction` without setting its name or length. 
diff --git a/core/engine/src/vm/mod.rs b/core/engine/src/vm/mod.rs index e62c39b35dc..be8fa116c33 100644 --- a/core/engine/src/vm/mod.rs +++ b/core/engine/src/vm/mod.rs @@ -7,6 +7,7 @@ use crate::{ builtins::promise::{PromiseCapability, ResolvingFunctions}, environments::EnvironmentStack, + native_function::AsyncCallState, object::JsFunction, realm::Realm, script::Script, @@ -819,7 +820,15 @@ impl Context { if runtime_budget == 0 { runtime_budget = budget; - yield_now().await; + match &mut self.async_call { + AsyncCallState::None | AsyncCallState::Finished(_) => { + yield_now().await; + } + AsyncCallState::Calling { f } => { + let result = f.await; + self.async_call = AsyncCallState::Finished(result); + } + } } } @@ -846,6 +855,7 @@ impl Context { ControlFlow::Continue(_) => {} ControlFlow::Break(value) => return value, } + debug_assert!(matches!(self.async_call, AsyncCallState::None)); } CompletionRecord::Throw(JsError::from_native(JsNativeError::error())) diff --git a/core/engine/src/vm/opcode/call/mod.rs b/core/engine/src/vm/opcode/call/mod.rs index 2dbcaa2a1bc..b59ae10a4e1 100644 --- a/core/engine/src/vm/opcode/call/mod.rs +++ b/core/engine/src/vm/opcode/call/mod.rs @@ -197,8 +197,7 @@ impl Call { .__call__(argument_count.into()) .async_resolve(context)? { - ResolvedCallValue::Ready => Ok(OpStatus::Finished), - ResolvedCallValue::Complete => Ok(OpStatus::Finished), + ResolvedCallValue::Ready | ResolvedCallValue::Complete => Ok(OpStatus::Finished), ResolvedCallValue::Pending => { //println!("Pending call"); Ok(OpStatus::Pending)
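For completeness, a minimal driver sketch for the path exercised above: it assumes the `register` helper from the previous sketch and, unlike the full tokio_event_loop.rs example (which wires up an async job queue on a `LocalSet`), it uses a default `Context` purely to keep the sketch short. The point it illustrates is that evaluating the script with `evaluate_async` lets the VM `.await` the stashed native-function future on the host's Tokio runtime instead of busy-polling:

    use boa_engine::{Context, JsResult, Script, Source};

    fn run_sketch() -> JsResult<()> {
        let runtime = tokio::runtime::Builder::new_current_thread()
            .enable_time()
            .build()
            .expect("failed to build a Tokio runtime");

        runtime.block_on(async {
            let mut context = Context::default();
            register(&mut context); // illustrative helper from the previous sketch

            // From the script's point of view this call is synchronous; the VM
            // stalls on the Call operation and awaits the boxed future instead.
            let script = Script::parse(
                Source::from_bytes("host_sleep(250); 'done'"),
                None,
                &mut context,
            )?;
            let result = script.evaluate_async(&mut context).await?;
            println!("script returned: {}", result.display());
            Ok(())
        })
    }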