[Flight] Walk parsed JSON instead of using reviver for parsing RSC payload (#35776)
## Summary

Follow-up to https://github.com/vercel/next.js/pull/89823 with the actual changes to React. Replaces the `JSON.parse` reviver callback in `initializeModelChunk` with a two-step approach: a plain `JSON.parse()` followed by a recursive `reviveModel()` post-process (same as in the Flight Reply Server). This yields a **~75% speedup** in RSC chunk deserialization.

| Payload | Original (ms) | Walk (ms) | Speedup |
|---------|---------------|-----------|---------|
| Small (2 elements, 142B) | 0.0024 | 0.0007 | **+72%** |
| Medium (~12 elements, 914B) | 0.0116 | 0.0031 | **+73%** |
| Large (~90 elements, 16.7KB) | 0.1836 | 0.0451 | **+75%** |
| XL (~200 elements, 25.7KB) | 0.3742 | 0.0913 | **+76%** |
| Table (1000 rows, 110KB) | 3.0862 | 0.6887 | **+78%** |

## Problem

`createFromJSONCallback` returns a reviver function that is passed as the second argument to `JSON.parse()`. This reviver is called for **every key-value pair** in the parsed JSON. While the logic inside the reviver is lightweight, the dominant cost is the **C++ → JavaScript boundary crossing**: V8's `JSON.parse` is implemented in C++, and calling back into JavaScript for every node incurs significant overhead.

Even a trivial no-op reviver `(k, v) => v` makes `JSON.parse` **~4x slower** than a bare `JSON.parse` without a reviver:

```
108 KB payload:
  Bare JSON.parse: 0.60 ms
  Trivial reviver: 2.95 ms (+391%)
```

## Change

Replace the reviver with a two-step process:

1. `JSON.parse(resolvedModel)` — parse the entire payload in C++ with no callbacks
2. `reviveModel` — recursively walk the resulting object in pure JavaScript to apply RSC transformations

The `reviveModel` function includes additional optimizations over the original reviver:

- **Short-circuits plain strings**: only calls `parseModelString` when the string starts with `$`, skipping the vast majority of strings (class names, text content, etc.)
- **Stays entirely in JavaScript** — no C++ boundary crossings during the walk

## Results

You can find the related applications in the [Next.js PR](https://github.com/vercel/next.js/pull/89823), as I've been testing this on Next.js applications.

### Table as Server Component with 1000 items

Before:

```
"min": 13.782875000000786,
"max": 22.23400000000038,
"avg": 17.116868530000083,
"p50": 17.10766700000022,
"p75": 18.50787499999933,
"p95": 20.426249999998618,
"p99": 21.814125000000786
```

After:

```
"min": 10.963916999999128,
"max": 18.096083000000363,
"avg": 13.543286884999988,
"p50": 13.58350000000064,
"p75": 14.871791999999914,
"p95": 16.08429099999921,
"p99": 17.591458000000785
```

### Table as Client Component with 1000 items

Before:

```
"min": 3.888875000000553,
"max": 9.044959000000745,
"avg": 4.651271475000067,
"p50": 4.555749999999534,
"p75": 4.966624999999112,
"p95": 5.47754200000054,
"p99": 6.109499999998661
```

After:

```
"min": 3.5986250000005384,
"max": 5.374291000000085,
"avg": 4.142990245000046,
"p50": 4.10570799999914,
"p75": 4.392041999999492,
"p95": 4.740084000000934,
"p99": 5.1652500000000146
```

### Nested Suspense

Before:

```
Requests: 200
Min: 73ms
Max: 106ms
Avg: 78ms
P50: 77ms
P75: 80ms
P95: 85ms
P99: 94ms
```

After:

```
Requests: 200
Min: 56ms
Max: 67ms
Avg: 59ms
P50: 58ms
P75: 60ms
P95: 65ms
P99: 66ms
```

### Even more nested Suspense (double-level Suspense)

Before:

```
Requests: 200
Min: 159ms
Max: 208ms
Avg: 169ms
P50: 167ms
P75: 173ms
P95: 183ms
P99: 188ms
```

After:

```
Requests: 200
Min: 125ms
Max: 170ms
Avg: 134ms
P50: 132ms
P75: 138ms
P95: 148ms
P99: 160ms
```

## How did you test this change?

Ran it across many Next.js benchmark applications. The entire Next.js test suite passes with this change.

---------

Co-authored-by: Hendrik Liebau <mail@hendrik-liebau.de>
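For context on the reviver-overhead numbers quoted in the Problem section above, here is a minimal micro-benchmark sketch. It is not part of the commit; the synthetic payload and iteration counts are assumptions, so absolute numbers will differ from the ones above, but the relative gap between the two calls should be visible:

```js
// Sketch only: compares bare JSON.parse with JSON.parse plus a no-op reviver.
// The payload below is synthetic and is not a real RSC payload.
const rows = [];
for (let i = 0; i < 1500; i++) {
  rows.push({id: i, name: 'row-' + i, values: [i, i * 2, i * 3]});
}
const payload = JSON.stringify(rows); // on the order of 100 KB of JSON

function time(label, fn) {
  for (let i = 0; i < 10; i++) fn(); // warm up
  const start = performance.now();
  for (let i = 0; i < 100; i++) fn();
  console.log(label, ((performance.now() - start) / 100).toFixed(3), 'ms');
}

time('Bare JSON.parse:', () => JSON.parse(payload));
// The reviver does no useful work, but it still forces a C++ -> JavaScript
// call for every key/value pair in the payload.
time('Trivial reviver:', () => JSON.parse(payload, (k, v) => v));
```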
This commit is contained in:

- packages/react-client/src/ReactFlightClient.js (vendored): 65 lines changed
```diff
@@ -355,7 +355,6 @@ type Response = {
   _encodeFormAction: void | EncodeFormActionCallback,
   _nonce: ?string,
   _chunks: Map<number, SomeChunk<any>>,
-  _fromJSON: (key: string, value: JSONValue) => any,
   _stringDecoder: StringDecoder,
   _closed: boolean,
   _closedReason: mixed,
```
```diff
@@ -2302,6 +2301,11 @@ function defineLazyGetter<T>(
       // TODO: We should ideally throw here to indicate a difference.
       return OMITTED_PROP_ERROR;
     },
+    // no-op: the walk function may try to reassign this property after
+    // parseModelString returns. With the JSON.parse reviver, the engine's
+    // internal CreateDataProperty silently failed. We use a no-op setter
+    // to match that behavior in strict mode.
+    set: function () {},
     enumerable: true,
     configurable: false,
   });
```
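The no-op setter added above compensates for a behavioral difference between the two approaches: `JSON.parse` installs revived values through the engine's internal CreateDataProperty, which silently fails on a non-writable property, while the JavaScript walk uses ordinary strict-mode assignment, which would throw. A standalone sketch of that difference (not from the commit; the property name is hypothetical):

```js
'use strict';

// A getter-only, non-configurable property, shaped like the lazy getter above.
const withoutSetter = {};
Object.defineProperty(withoutSetter, 'lazy', {
  get() {
    return 'omitted';
  },
  enumerable: true,
  configurable: false,
});

try {
  // Strict-mode assignment to an accessor property without a setter throws.
  withoutSetter.lazy = 'replaced';
} catch (err) {
  console.log(err instanceof TypeError); // true
}

// With a no-op setter, the same assignment is silently ignored, matching the
// silent failure of CreateDataProperty during JSON.parse's reviver pass.
const withSetter = {};
Object.defineProperty(withSetter, 'lazy', {
  get() {
    return 'omitted';
  },
  set() {},
  enumerable: true,
  configurable: false,
});
withSetter.lazy = 'replaced'; // no throw
console.log(withSetter.lazy); // 'omitted'
```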
```diff
@@ -2622,6 +2626,11 @@ function parseModelString(
           // TODO: We should ideally throw here to indicate a difference.
           return OMITTED_PROP_ERROR;
         },
+        // no-op: the walk function may try to reassign this property
+        // after parseModelString returns. With the JSON.parse reviver,
+        // the engine's internal CreateDataProperty silently failed.
+        // We use a no-op setter to match that behavior in strict mode.
+        set: function () {},
         enumerable: true,
         configurable: false,
       });
```
```diff
@@ -2699,7 +2708,6 @@ function ResponseInstance(
   this._nonce = nonce;
   this._chunks = chunks;
   this._stringDecoder = createStringDecoder();
-  this._fromJSON = (null: any);
   this._closed = false;
   this._closedReason = null;
   this._allowPartialStream = allowPartialStream;
```
```diff
@@ -2783,9 +2791,6 @@ function ResponseInstance(
       markAllTracksInOrder();
     }
   }
-
-  // Don't inline this call because it causes closure to outline the call above.
-  this._fromJSON = createFromJSONCallback(this);
 }
 
 export function createResponse(
```
```diff
@@ -5259,24 +5264,52 @@ export function processStringChunk(
 }
 
 function parseModel<T>(response: Response, json: UninitializedModel): T {
-  return JSON.parse(json, response._fromJSON);
+  const rawModel = JSON.parse(json);
+  // Pass a wrapper object as parentObject to match the original JSON.parse
+  // reviver behavior, where the root value's reviver receives {"": rootValue}
+  // as `this`. This ensures parentObject is never null when accessed downstream.
+  return reviveModel(response, rawModel, {'': rawModel}, '');
 }
 
-function createFromJSONCallback(response: Response) {
-  // $FlowFixMe[missing-this-annot]
-  return function (key: string, value: JSONValue) {
-    if (key === __PROTO__) {
-      return undefined;
+function reviveModel(
+  response: Response,
+  value: JSONValue,
+  parentObject: Object,
+  key: string,
+): any {
+  if (typeof value === 'string') {
+    if (value[0] === '$') {
+      return parseModelString(response, parentObject, key, value);
     }
-    if (typeof value === 'string') {
-      // We can't use .bind here because we need the "this" value.
-      return parseModelString(response, this, key, value);
+    return value;
+  }
+  if (typeof value !== 'object' || value === null) {
+    return value;
+  }
+  if (isArray(value)) {
+    for (let i = 0; i < value.length; i++) {
+      (value: any)[i] = reviveModel(response, value[i], value, '' + i);
     }
-    if (typeof value === 'object' && value !== null) {
+    if (value[0] === REACT_ELEMENT_TYPE) {
+      // React element tuple
       return parseModelTuple(response, value);
     }
     return value;
-  };
+  }
+  // Plain object
+  for (const k in value) {
+    if (k === __PROTO__) {
+      delete (value: any)[k];
+    } else {
+      const walked = reviveModel(response, (value: any)[k], value, k);
+      if (walked !== undefined) {
+        (value: any)[k] = walked;
+      } else {
+        delete (value: any)[k];
+      }
+    }
+  }
+  return value;
 }
 
 export function close(weakResponse: WeakResponse): void {
```
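The `{'': rawModel}` wrapper in the new `parseModel` mirrors how `JSON.parse` treats the root value: the reviver's final call receives an empty-string key, with a `{"": root}` holder object bound as `this`. A small standalone sketch of that behavior (illustration only, not from the commit):

```js
// The reviver is called bottom-up; the last call is for the root value,
// with key === '' and a {'': root} holder object bound as `this`.
JSON.parse('{"a": 1}', function (key, value) {
  if (key === '') {
    console.log(this); // { '': { a: 1 } }
  }
  return value;
});

// reviveModel takes the same shape as an explicit argument instead of `this`,
// so parentObject is never null, even for the root value:
//   reviveModel(response, rawModel, {'': rawModel}, '');
```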
```diff
@@ -43,9 +43,6 @@ const {createResponse, createStreamState, processBinaryChunk, getRoot, close} =
   requireModule(idx: string) {
     return readModule(idx);
   },
-  parseModel(response: Response, json) {
-    return JSON.parse(json, response._fromJSON);
-  },
   bindToConsole(methodName, args, badgeName) {
     return Function.prototype.bind.apply(
       // eslint-disable-next-line react-internal/no-production-logging
```