
refactor: simplify async op error construction #1039

Merged Jan 9, 2025 · 5 commits
Changes from 4 commits
1 change: 1 addition & 0 deletions Cargo.lock


60 changes: 20 additions & 40 deletions core/00_infra.js
@@ -17,7 +17,7 @@
Promise,
PromiseReject,
PromiseResolve,
PromisePrototypeThen,
PromisePrototypeCatch,
RangeError,
ReferenceError,
SafeArrayIterator,
@@ -133,18 +133,22 @@
MapPrototypeSet(promiseMap, oldPromiseId, oldPromise);
}

const promise = new Promise((resolve) => {
promiseRing[idx] = resolve;
const promise = new Promise((resolve, reject) => {
promiseRing[idx] = [resolve, reject];
Comment on lines +136 to +137
Member

Can you run an op_void_async_deferred microbenchmark? Slightly worried this will cause a regression because it's in the hot path. Maybe { resolve, reject } is faster because of shape optimizations.
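
For reference, a minimal sketch of the { resolve, reject } variant being suggested here (hypothetical illustration only; the PR as written stores a [resolve, reject] array):

const promise = new Promise((resolve, reject) => {
  // Each pending op stores one small object; every entry shares the same
  // hidden class, which is what the shape-optimization argument relies on.
  promiseRing[idx] = { resolve, reject };
});

// later, in __resolvePromise
const entry = promiseRing[idx];
promiseRing[idx] = NO_PROMISE;
if (isOk) {
  entry.resolve(res);
} else {
  entry.reject(res);
}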

@crowlKats (Member Author) · Jan 9, 2025

main:

test bench_op_async_void_deferred         ... bench:     739,054 ns/iter (+/- 77,089)
test bench_op_async_void_deferred_nofast  ... bench:     715,870 ns/iter (+/- 78,892)
test bench_op_async_void_deferred_return  ... bench:     678,101 ns/iter (+/- 38,345)

PR:

test bench_op_async_void_deferred         ... bench:     712,190 ns/iter (+/- 61,168)
test bench_op_async_void_deferred_nofast  ... bench:     742,969 ns/iter (+/- 52,853)
test bench_op_async_void_deferred_return  ... bench:     689,404 ns/iter (+/- 41,140)

PR with suggestion:

test bench_op_async_void_deferred         ... bench:     742,152 ns/iter (+/- 66,355)
test bench_op_async_void_deferred_nofast  ... bench:     736,991 ns/iter (+/- 70,942)
test bench_op_async_void_deferred_return  ... bench:     689,867 ns/iter (+/- 39,801)

Contributor

If the array allocation is a concern, we could also move to two arrays instead:

const promiseRing = ArrayPrototypeFill(new Array(RING_SIZE), NO_PROMISE);
const rejectRing = ArrayPrototypeFill(new Array(RING_SIZE), NO_PROMISE);

// later
const promise = new Promise((resolve, reject) => {
  promiseRing[idx] = resolve;
  rejectRing[idx] = reject;
});

Basically keep two rings at the same time. That should be better for memory and GC pressure.
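
A possible settle path under that two-ring layout (hypothetical sketch building on the rejectRing above, not code from this PR):

// inside __resolvePromise, ring case
const idx = promiseId % RING_SIZE;
const resolve = promiseRing[idx];
const reject = rejectRing[idx];
promiseRing[idx] = NO_PROMISE;
rejectRing[idx] = NO_PROMISE;
if (isOk) {
  resolve(res);
} else {
  reject(res);
}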

});
const wrappedPromise = PromisePrototypeThen(
const wrappedPromise = PromisePrototypeCatch(
promise,
unwrapOpError(),
(res) => {
// recreate the stacktrace and strip eventLoopTick() calls from stack trace
ErrorCaptureStackTrace(res, eventLoopTick);
throw res;
},
);
wrappedPromise[promiseIdSymbol] = promiseId;
return wrappedPromise;
}

function __resolvePromise(promiseId, res) {
function __resolvePromise(promiseId, res, isOk) {
// Check if out of ring bounds, fallback to map
const outOfBounds = promiseId < nextPromiseId - RING_SIZE;
if (outOfBounds) {
@@ -153,7 +157,11 @@
throw "Missing promise in map @ " + promiseId;
}
MapPrototypeDelete(promiseMap, promiseId);
promise(res);
if (isOk) {
promise[0](res);
} else {
promise[1](res);
}
} else {
// Otherwise take from ring
const idx = promiseId % RING_SIZE;
@@ -162,7 +170,11 @@
throw "Missing promise in ring @ " + promiseId;
}
promiseRing[idx] = NO_PROMISE;
promise(res);
if (isOk) {
promise[0](res);
} else {
promise[1](res);
}
}
}

@@ -177,38 +189,6 @@
return promiseRing[idx] != NO_PROMISE;
}

function unwrapOpError() {
return (res) => {
// .$err_class_name is a special key that should only exist on errors
const className = res?.$err_class_name;
if (!className) {
return res;
}

const errorBuilder = errorMap[className];
const err = errorBuilder ? errorBuilder(res.message) : new Error(
`Unregistered error class: "${className}"\n ${res.message}\n Classes of errors returned from ops should be registered via Deno.core.registerErrorClass().`,
);

if (res.additional_properties) {
for (
const property of new SafeArrayIterator(res.additional_properties)
) {
const key = property[0];
if (!(key in err)) {
ObjectDefineProperty(err, key, {
value: property[1],
writable: false,
});
}
}
}
// Strip eventLoopTick() calls from stack trace
ErrorCaptureStackTrace(err, eventLoopTick);
throw err;
};
}

function setUpAsyncStub(opName, originalOp, maybeProto) {
let fn;

8 changes: 5 additions & 3 deletions core/01_core.js
@@ -161,10 +161,12 @@
// responses of async ops.
function eventLoopTick() {
// First respond to all pending ops.
for (let i = 0; i < arguments.length - 3; i += 2) {
for (let i = 0; i < arguments.length - 3; i += 3) {
const promiseId = arguments[i];
const res = arguments[i + 1];
__resolvePromise(promiseId, res);
const isOk = arguments[i + 1];
const res = arguments[i + 2];

__resolvePromise(promiseId, res, isOk);
}
// Drain nextTick queue if there's a tick scheduled.
if (arguments[arguments.length - 1]) {
2 changes: 0 additions & 2 deletions core/error.rs
@@ -1,8 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.

pub use super::modules::ModuleConcreteError;
pub use super::runtime::op_driver::OpError;
pub use super::runtime::op_driver::OpErrorWrapper;
pub use crate::io::ResourceError;
pub use crate::modules::ModuleLoaderError;
use crate::runtime::v8_static_strings;
2 changes: 1 addition & 1 deletion core/examples/hello_world.rs
@@ -7,7 +7,7 @@ use deno_core::*;
/// An op for summing an array of numbers. The op-layer automatically
/// deserializes inputs and serializes the returned Result & value.
#[op2]
fn op_sum(#[serde] nums: Vec<f64>) -> Result<f64, deno_core::error::OpError> {
fn op_sum(#[serde] nums: Vec<f64>) -> Result<f64, deno_error::JsErrorBox> {
// Sum inputs
let sum = nums.iter().fold(0.0, |a, v| a + v);
// return as a Result<f64, OpError>
3 changes: 1 addition & 2 deletions core/examples/op2.rs
@@ -1,15 +1,14 @@
// Copyright 2018-2025 the Deno authors. MIT license.

use anyhow::Context;
use deno_core::error::OpError;
use deno_core::*;
use std::rc::Rc;

#[op2]
fn op_use_state(
state: &mut OpState,
#[global] callback: v8::Global<v8::Function>,
) -> Result<(), OpError> {
) -> Result<(), deno_error::JsErrorBox> {
state.put(callback);
Ok(())
}
109 changes: 68 additions & 41 deletions core/ops_builtin.rs
@@ -2,7 +2,8 @@

use crate::error::exception_to_err_result;
use crate::error::format_file_name;
use crate::error::OpError;
use crate::error::CoreError;
use crate::error::ResourceError;
use crate::io::AdaptiveBufferStrategy;
use crate::io::BufMutView;
use crate::io::BufView;
@@ -176,14 +177,14 @@ pub async fn op_void_async() {}

#[allow(clippy::unused_async)]
#[op2(async)]
pub async fn op_error_async() -> Result<(), OpError> {
Err(JsErrorBox::generic("error").into())
pub async fn op_error_async() -> Result<(), JsErrorBox> {
Err(JsErrorBox::generic("error"))
}

#[allow(clippy::unused_async)]
#[op2(async(deferred), fast)]
pub async fn op_error_async_deferred() -> Result<(), OpError> {
Err(JsErrorBox::generic("error").into())
pub async fn op_error_async_deferred() -> Result<(), JsErrorBox> {
Err(JsErrorBox::generic("error"))
}

#[allow(clippy::unused_async)]
@@ -195,7 +196,7 @@ pub async fn op_void_async_deferred() {}
pub fn op_close(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
) -> Result<(), OpError> {
) -> Result<(), ResourceError> {
let resource = state.borrow_mut().resource_table.take_any(rid)?;
resource.close();
Ok(())
@@ -212,7 +213,10 @@ pub fn op_try_close(state: Rc<RefCell<OpState>>, #[smi] rid: ResourceId) {

/// Builtin utility to print to stdout/stderr
#[op2(fast)]
pub fn op_print(#[string] msg: &str, is_err: bool) -> Result<(), OpError> {
pub fn op_print(
#[string] msg: &str,
is_err: bool,
) -> Result<(), std::io::Error> {
if is_err {
stderr().write_all(msg.as_bytes())?;
stderr().flush().unwrap();
@@ -244,7 +248,7 @@ pub fn op_wasm_streaming_feed(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[buffer] bytes: &[u8],
) -> Result<(), OpError> {
) -> Result<(), ResourceError> {
let wasm_streaming = state
.borrow_mut()
.resource_table
@@ -260,7 +264,7 @@ pub fn op_wasm_streaming_set_url(
state: &mut OpState,
#[smi] rid: ResourceId,
#[string] url: &str,
) -> Result<(), OpError> {
) -> Result<(), ResourceError> {
let wasm_streaming =
state.resource_table.get::<WasmStreamingResource>(rid)?;

@@ -274,23 +278,27 @@ async fn op_read(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[buffer] buf: JsBuffer,
) -> Result<u32, OpError> {
let resource = state.borrow().resource_table.get_any(rid)?;
) -> Result<u32, JsErrorBox> {
let resource = state
.borrow()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
let view = BufMutView::from(buf);
resource
.read_byob(view)
.await
.map(|(n, _)| n as u32)
.map_err(|err| err.into())
resource.read_byob(view).await.map(|(n, _)| n as u32)
}

#[op2(async)]
#[buffer]
async fn op_read_all(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
) -> Result<BytesMut, OpError> {
let resource = state.borrow().resource_table.get_any(rid)?;
) -> Result<BytesMut, JsErrorBox> {
let resource = state
.borrow()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;

let (min, maybe_max) = resource.size_hint();
let mut buffer_strategy =
@@ -324,8 +332,12 @@ async fn op_write(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[buffer] buf: JsBuffer,
) -> Result<u32, OpError> {
let resource = state.borrow().resource_table.get_any(rid)?;
) -> Result<u32, JsErrorBox> {
let resource = state
.borrow()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
let view = BufView::from(buf);
let resp = resource.write(view).await?;
Ok(resp.nwritten() as u32)
@@ -336,21 +348,26 @@ fn op_read_sync(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[buffer] data: &mut [u8],
) -> Result<u32, OpError> {
let resource = state.borrow_mut().resource_table.get_any(rid)?;
resource
.read_byob_sync(data)
.map(|n| n as u32)
.map_err(|err| err.into())
) -> Result<u32, JsErrorBox> {
let resource = state
.borrow_mut()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
resource.read_byob_sync(data).map(|n| n as u32)
}

#[op2(fast)]
fn op_write_sync(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[buffer] data: &[u8],
) -> Result<u32, OpError> {
let resource = state.borrow_mut().resource_table.get_any(rid)?;
) -> Result<u32, JsErrorBox> {
let resource = state
.borrow_mut()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
let nwritten = resource.write_sync(data)?;
Ok(nwritten as u32)
}
@@ -360,8 +377,12 @@ async fn op_write_all(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[buffer] buf: JsBuffer,
) -> Result<(), OpError> {
let resource = state.borrow().resource_table.get_any(rid)?;
) -> Result<(), JsErrorBox> {
let resource = state
.borrow()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
let view = BufView::from(buf);
resource.write_all(view).await?;
Ok(())
@@ -372,21 +393,27 @@ async fn op_write_type_error(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[string] error: String,
) -> Result<(), OpError> {
let resource = state.borrow().resource_table.get_any(rid)?;
resource
.write_error(&deno_error::JsErrorBox::type_error(error))
.await?;
) -> Result<(), JsErrorBox> {
let resource = state
.borrow()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
resource.write_error(&JsErrorBox::type_error(error)).await?;
Ok(())
}

#[op2(async)]
async fn op_shutdown(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
) -> Result<(), OpError> {
let resource = state.borrow().resource_table.get_any(rid)?;
resource.shutdown().await.map_err(|err| err.into())
) -> Result<(), JsErrorBox> {
let resource = state
.borrow()
.resource_table
.get_any(rid)
.map_err(JsErrorBox::from_err)?;
resource.shutdown().await
}

#[op2]
@@ -424,7 +451,7 @@ fn op_encode_binary_string(#[buffer] s: &[u8]) -> ByteString {
fn op_is_terminal(
state: &mut OpState,
#[smi] rid: ResourceId,
) -> Result<bool, OpError> {
) -> Result<bool, ResourceError> {
let handle = state.resource_table.get_handle(rid)?;
Ok(handle.is_terminal())
}
Expand All @@ -434,7 +461,7 @@ async fn do_load_job<'s>(
module_map_rc: Rc<ModuleMap>,
specifier: &str,
code: Option<String>,
) -> Result<ModuleId, OpError> {
) -> Result<ModuleId, CoreError> {
if let Some(code) = code {
module_map_rc
.new_es_module(scope, false, specifier.to_owned(), code, false, None)
@@ -554,7 +581,7 @@ fn op_import_sync<'s>(
scope: &mut v8::HandleScope<'s>,
#[string] specifier: &str,
#[string] code: Option<String>,
) -> Result<v8::Local<'s, v8::Value>, OpError> {
) -> Result<v8::Local<'s, v8::Value>, CoreError> {
let module_map_rc = JsRealm::module_map_from(scope);

// no js execution within block_on