Commit

fix(server): improve too big batch response msg (#1107)
* fix(server): improve too big batch response msg

* adjust tests

* fix tests
niklasad1 authored May 2, 2023
1 parent daeec72 commit bef63ae
Showing 5 changed files with 33 additions and 8 deletions.
8 changes: 5 additions & 3 deletions core/src/server/helpers.rs
@@ -28,7 +28,9 @@ use std::io;
use std::time::Duration;

use crate::tracing::tx_log_from_str;
use jsonrpsee_types::error::{ErrorCode, ErrorObject, OVERSIZED_RESPONSE_CODE, OVERSIZED_RESPONSE_MSG};
use jsonrpsee_types::error::{
reject_too_big_batch_response, ErrorCode, ErrorObject, OVERSIZED_RESPONSE_CODE, OVERSIZED_RESPONSE_MSG,
};
use jsonrpsee_types::{Id, InvalidRequest, Response, ResponsePayload};
use serde::Serialize;
use serde_json::value::to_raw_value;
@@ -268,7 +270,7 @@ impl BatchResponseBuilder {
let len = response.result.len() + self.result.len() + 1;

if len > self.max_response_size {
Err(batch_response_error(Id::Null, ErrorObject::from(ErrorCode::InvalidRequest)))
Err(batch_response_error(Id::Null, reject_too_big_batch_response(self.max_response_size)))
} else {
self.result.push_str(&response.result);
self.result.push(',');
@@ -365,7 +367,7 @@ mod tests {

let batch = BatchResponseBuilder::new_with_limit(63).append(&method).unwrap_err();

let exp_err = r#"{"jsonrpc":"2.0","error":{"code":-32600,"message":"Invalid request"},"id":null}"#;
let exp_err = r#"{"jsonrpc":"2.0","error":{"code":-32011,"message":"The batch response was too large","data":"Exceeded max limit of 63"},"id":null}"#;
assert_eq!(batch, exp_err);
}
}
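
The rejection above fires when the projected batch length, that is the buffered results so far plus the new entry plus one byte for the separating comma (later swapped for the closing bracket), exceeds max_response_size. A minimal standalone sketch of that accounting, using illustrative names rather than the actual jsonrpsee types:

```rust
// Sketch only: mirrors the size accounting in BatchResponseBuilder::append,
// not the real jsonrpsee implementation.
struct BatchBuf {
    buf: String, // holds "[entry,entry," while the batch is being assembled
    max: usize,  // corresponds to max_response_size
}

impl BatchBuf {
    fn new(max: usize) -> Self {
        Self { buf: String::from("["), max }
    }

    /// Fails with the configured limit when appending `entry` would push the
    /// serialized batch past `max` bytes.
    fn append(&mut self, entry: &str) -> Result<(), usize> {
        // new entry + everything buffered so far + 1 byte for ',' (or the final ']')
        let projected = entry.len() + self.buf.len() + 1;
        if projected > self.max {
            // The real code returns reject_too_big_batch_response(self.max) here.
            return Err(self.max);
        }
        self.buf.push_str(entry);
        self.buf.push(',');
        Ok(())
    }

    /// Swap the trailing ',' for the closing ']'.
    fn finish(mut self) -> String {
        self.buf.pop();
        self.buf.push(']');
        self.buf
    }
}

fn main() {
    let mut batch = BatchBuf::new(63);
    assert!(batch.append(r#"{"jsonrpc":"2.0","result":"a","id":1}"#).is_ok());
    // A second entry of the same size would exceed the 63-byte cap, as in the test above.
    assert_eq!(batch.append(r#"{"jsonrpc":"2.0","result":"a","id":2}"#), Err(63));
    println!("{}", batch.finish());
}
```
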
2 changes: 1 addition & 1 deletion server/src/tests/http.rs
@@ -511,7 +511,7 @@ async fn can_set_the_max_response_size_to_batch() {
// Two response will end up in a response of 102 bytes which is too big.
let req = r#"[{"jsonrpc":"2.0", "method":"anything", "id":1},{"jsonrpc":"2.0", "method":"anything", "id":2}]"#;
let response = http_request(req.into(), uri.clone()).with_default_timeout().await.unwrap().unwrap();
assert_eq!(response.body, invalid_request(Id::Null));
assert_eq!(response.body, batch_response_too_large(100));

handle.stop().unwrap();
handle.stopped().await;
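
Not part of the repository's test harness, but for illustration: a hedged sketch of sending the same two-call batch to a server started with this 100-byte response cap and reading back the single error body. The localhost address and the reqwest/tokio dependencies are assumptions, not taken from this diff.

```rust
// Hedged sketch: plain HTTP client, not the repo's test utilities.
// Assumes a jsonrpsee HTTP server with a 100-byte max response size is
// listening at the (made-up) address below.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let req = r#"[{"jsonrpc":"2.0", "method":"anything", "id":1},{"jsonrpc":"2.0", "method":"anything", "id":2}]"#;
    let body = reqwest::Client::new()
        .post("http://127.0.0.1:9944")
        .header("content-type", "application/json")
        .body(req)
        .send()
        .await?
        .text()
        .await?;
    // With the cap in place this prints one -32011 error object rather than a
    // two-element result array.
    println!("{body}");
    Ok(())
}
```
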
2 changes: 1 addition & 1 deletion server/src/tests/ws.rs
@@ -108,7 +108,7 @@ async fn can_set_the_max_response_size_to_batch() {
// Two response will end up in a response bigger than 100 bytes.
let req = r#"[{"jsonrpc":"2.0", "method":"anything", "id":1},{"jsonrpc":"2.0", "method":"anything", "id":2}]"#;
let response = client.send_request_text(req).await.unwrap();
assert_eq!(response, invalid_request(Id::Null));
assert_eq!(response, batch_response_too_large(100));

server_handle.stop().unwrap();
server_handle.stopped().await;
6 changes: 6 additions & 0 deletions test-utils/src/helpers.rs
@@ -87,6 +87,12 @@ pub fn batches_too_large(max_limit: usize) -> String {
)
}

pub fn batch_response_too_large(max_limit: usize) -> String {
format!(
r#"{{"jsonrpc":"2.0","error":{{"code":-32011,"message":"The batch response was too large","data":"Exceeded max limit of {max_limit}"}},"id":null}}"#
)
}

pub fn oversized_response(id: Id, max_limit: u32) -> String {
format!(
r#"{{"jsonrpc":"2.0","error":{{"code":-32008,"message":"Response is too big","data":"Exceeded max limit of {}"}},"id":{}}}"#,
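
With max_limit = 100, as used by the HTTP and WebSocket tests above, the helper yields the expected error body. A small hedged check of its shape, assuming the helper is copied into a standalone binary with serde_json available:

```rust
// Hedged sketch: the helper body is copied from test-utils above; serde_json
// is an assumed dependency of this standalone check.
fn batch_response_too_large(max_limit: usize) -> String {
    format!(
        r#"{{"jsonrpc":"2.0","error":{{"code":-32011,"message":"The batch response was too large","data":"Exceeded max limit of {max_limit}"}},"id":null}}"#
    )
}

fn main() {
    let body = batch_response_too_large(100);
    let v: serde_json::Value = serde_json::from_str(&body).expect("helper output is valid JSON");
    assert_eq!(v["error"]["code"], -32011);
    assert_eq!(v["error"]["data"], "Exceeded max limit of 100");
    assert!(v["id"].is_null());
    println!("{body}");
}
```
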
23 changes: 20 additions & 3 deletions types/src/error.rs
@@ -135,7 +135,9 @@ pub const OVERSIZED_RESPONSE_CODE: i32 = -32008;
/// Server is busy error code.
pub const SERVER_IS_BUSY_CODE: i32 = -32009;
/// Batch request limit was exceed.
pub const TOO_BIG_BATCH_CODE: i32 = -32010;
pub const TOO_BIG_BATCH_REQUEST_CODE: i32 = -32010;
/// Batch request limit was exceed.
pub const TOO_BIG_BATCH_RESPONSE_CODE: i32 = -32011;

/// Parse error message
pub const PARSE_ERROR_MSG: &str = "Parse error";
@@ -160,7 +162,9 @@ pub const BATCHES_NOT_SUPPORTED_MSG: &str = "Batched requests are not supported
/// Subscription limit per connection was exceeded.
pub const TOO_MANY_SUBSCRIPTIONS_MSG: &str = "Too many subscriptions on the connection";
/// Batched requests limit was exceed.
pub const TOO_BIG_BATCH_MSG: &str = "The batch request was too large";
pub const TOO_BIG_BATCH_REQUEST_MSG: &str = "The batch request was too large";
/// Batch request response limit was exceed.
pub const TOO_BIG_BATCH_RESPONSE_MSG: &str = "The batch response was too large";

/// JSONRPC error code
#[derive(Error, Debug, PartialEq, Eq, Copy, Clone)]
@@ -276,7 +280,7 @@ pub fn reject_too_big_request(limit: u32) -> ErrorObjectOwned {

/// Helper to get a `JSON-RPC` error object when the maximum batch request size have been exceeded.
pub fn reject_too_big_batch_request(limit: usize) -> ErrorObjectOwned {
ErrorObjectOwned::owned(TOO_BIG_BATCH_CODE, TOO_BIG_BATCH_MSG, Some(format!("Exceeded max limit of {limit}")))
ErrorObjectOwned::owned(
TOO_BIG_BATCH_REQUEST_CODE,
TOO_BIG_BATCH_REQUEST_MSG,
Some(format!("Exceeded max limit of {limit}")),
)
}

/// Helper to get a `JSON-RPC` error object when the maximum batch response size have been exceeded.
pub fn reject_too_big_batch_response(limit: usize) -> ErrorObjectOwned {
ErrorObjectOwned::owned(
TOO_BIG_BATCH_RESPONSE_CODE,
TOO_BIG_BATCH_RESPONSE_MSG,
Some(format!("Exceeded max limit of {limit}")),
)
}

#[cfg(test)]
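
The new helper pairs the -32011 code with the new message and a data string carrying the configured limit. A hedged sketch of how it serializes, matching the error object the updated BatchResponseBuilder test expects (jsonrpsee-types and serde_json are assumed to be available as dependencies):

```rust
// Hedged sketch: serializing the new error object on its own.
use jsonrpsee_types::error::reject_too_big_batch_response;

fn main() {
    let err = reject_too_big_batch_response(63);
    // Same error object that the updated test in core/src/server/helpers.rs
    // expects inside its batch response.
    assert_eq!(
        serde_json::to_string(&err).unwrap(),
        r#"{"code":-32011,"message":"The batch response was too large","data":"Exceeded max limit of 63"}"#
    );
}
```
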

5 comments on commit bef63ae

@github-actions

⚠️ Performance Alert ⚠️

A possible performance regression was detected for this benchmark run.
The benchmark result of this commit is worse than the previous benchmark result by more than the alert threshold of 2.

| Benchmark suite | Current: bef63ae | Previous: 8b0058d | Ratio |
|---|---|---|---|
| sync/ws_concurrent_conn_calls/fast_call/2 | 1165955 ns/iter (± 82489) | 192610 ns/iter (± 3646) | 6.05 |
| sync/ws_concurrent_conn_calls/fast_call/4 | 1161539 ns/iter (± 101989) | 254859 ns/iter (± 4590) | 4.56 |
| sync/ws_concurrent_conn_calls/fast_call/8 | 1112005 ns/iter (± 117352) | 423527 ns/iter (± 9348) | 2.63 |
| async/ws_concurrent_conn_calls/fast_call/2 | 1087334 ns/iter (± 87968) | 193023 ns/iter (± 2826) | 5.63 |
| async/ws_concurrent_conn_calls/fast_call/4 | 1180405 ns/iter (± 59814) | 256149 ns/iter (± 4379) | 4.61 |
| async/ws_concurrent_conn_calls/fast_call/8 | 1153883 ns/iter (± 116479) | 424520 ns/iter (± 11700) | 2.72 |

This comment was automatically generated by a workflow using github-action-benchmark.

CC: @niklasad1

@github-actions

⚠️ Performance Alert ⚠️

A possible performance regression was detected for this benchmark run.
The benchmark result of this commit is worse than the previous benchmark result by more than the alert threshold of 2.

| Benchmark suite | Current: bef63ae | Previous: 8b0058d | Ratio |
|---|---|---|---|
| sync/ws_concurrent_conn_calls/fast_call/2 | 1133413 ns/iter (± 126260) | 192610 ns/iter (± 3646) | 5.88 |
| sync/ws_concurrent_conn_calls/fast_call/4 | 1174382 ns/iter (± 58189) | 254859 ns/iter (± 4590) | 4.61 |
| sync/ws_concurrent_conn_calls/fast_call/8 | 1127275 ns/iter (± 107020) | 423527 ns/iter (± 9348) | 2.66 |
| async/ws_concurrent_conn_calls/fast_call/2 | 1009877 ns/iter (± 127309) | 193023 ns/iter (± 2826) | 5.23 |
| async/ws_concurrent_conn_calls/fast_call/4 | 1174060 ns/iter (± 58072) | 256149 ns/iter (± 4379) | 4.58 |
| async/ws_concurrent_conn_calls/fast_call/8 | 1116079 ns/iter (± 130231) | 424520 ns/iter (± 11700) | 2.63 |

This comment was automatically generated by a workflow using github-action-benchmark.

CC: @niklasad1

@github-actions

⚠️ Performance Alert ⚠️

A possible performance regression was detected for this benchmark run.
The benchmark result of this commit is worse than the previous benchmark result by more than the alert threshold of 2.

| Benchmark suite | Current: bef63ae | Previous: 8b0058d | Ratio |
|---|---|---|---|
| sync/ws_concurrent_conn_calls/fast_call/2 | 1121741 ns/iter (± 78994) | 192610 ns/iter (± 3646) | 5.82 |
| sync/ws_concurrent_conn_calls/fast_call/4 | 1194433 ns/iter (± 61987) | 254859 ns/iter (± 4590) | 4.69 |
| sync/ws_concurrent_conn_calls/fast_call/8 | 1099284 ns/iter (± 142751) | 423527 ns/iter (± 9348) | 2.60 |
| async/ws_concurrent_conn_calls/fast_call/2 | 1097942 ns/iter (± 84371) | 193023 ns/iter (± 2826) | 5.69 |
| async/ws_concurrent_conn_calls/fast_call/4 | 1203335 ns/iter (± 70497) | 256149 ns/iter (± 4379) | 4.70 |
| async/ws_concurrent_conn_calls/fast_call/8 | 1110387 ns/iter (± 157573) | 424520 ns/iter (± 11700) | 2.62 |

This comment was automatically generated by a workflow using github-action-benchmark.

CC: @niklasad1

@github-actions

⚠️ Performance Alert ⚠️

A possible performance regression was detected for this benchmark run.
The benchmark result of this commit is worse than the previous benchmark result by more than the alert threshold of 2.

| Benchmark suite | Current: bef63ae | Previous: 8b0058d | Ratio |
|---|---|---|---|
| sync/ws_concurrent_conn_calls/fast_call/2 | 1046872 ns/iter (± 95662) | 192610 ns/iter (± 3646) | 5.44 |
| sync/ws_concurrent_conn_calls/fast_call/4 | 1231967 ns/iter (± 68142) | 254859 ns/iter (± 4590) | 4.83 |
| sync/ws_concurrent_conn_calls/fast_call/8 | 1195820 ns/iter (± 81273) | 423527 ns/iter (± 9348) | 2.82 |
| async/ws_concurrent_conn_calls/fast_call/2 | 1030644 ns/iter (± 124490) | 193023 ns/iter (± 2826) | 5.34 |
| async/ws_concurrent_conn_calls/fast_call/4 | 1198796 ns/iter (± 50759) | 256149 ns/iter (± 4379) | 4.68 |
| async/ws_concurrent_conn_calls/fast_call/8 | 1155512 ns/iter (± 112261) | 424520 ns/iter (± 11700) | 2.72 |

This comment was automatically generated by a workflow using github-action-benchmark.

CC: @niklasad1

@github-actions

⚠️ Performance Alert ⚠️

A possible performance regression was detected for this benchmark run.
The benchmark result of this commit is worse than the previous benchmark result by more than the alert threshold of 2.

| Benchmark suite | Current: bef63ae | Previous: 8b0058d | Ratio |
|---|---|---|---|
| sync/ws_concurrent_conn_calls/fast_call/2 | 1111810 ns/iter (± 98806) | 192610 ns/iter (± 3646) | 5.77 |
| sync/ws_concurrent_conn_calls/fast_call/4 | 1295763 ns/iter (± 70623) | 254859 ns/iter (± 4590) | 5.08 |
| sync/ws_concurrent_conn_calls/fast_call/8 | 1184524 ns/iter (± 103065) | 423527 ns/iter (± 9348) | 2.80 |
| async/ws_concurrent_conn_calls/fast_call/2 | 1076510 ns/iter (± 115295) | 193023 ns/iter (± 2826) | 5.58 |
| async/ws_concurrent_conn_calls/fast_call/4 | 1366269 ns/iter (± 62846) | 256149 ns/iter (± 4379) | 5.33 |
| async/ws_concurrent_conn_calls/fast_call/8 | 1215158 ns/iter (± 85464) | 424520 ns/iter (± 11700) | 2.86 |

This comment was automatically generated by a workflow using github-action-benchmark.

CC: @niklasad1
