[Flight] Don't call onError/onPostpone when halting and unify error branches (#31715)

We shouldn't call onError/onPostpone when we halt a stream, because that node hasn't actually errored yet; its digest would also get lost.

We also have a lot of error branches now for thenables and streams. This unifies them under a single erroredTask helper. I'm not yet unifying the cases that don't allocate a task for the error when those are outlined.
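For context, here is a minimal sketch of the caller-visible change, mirroring the test pattern updated below (ReactServerDOMStaticServer, App, and webpackMap stand in for the fixtures used in the actual tests):

// Sketch only: abort an in-flight prerender and observe onError.
const errors = [];
const controller = new AbortController();
const pendingResult = ReactServerDOMStaticServer.prerenderToNodeStream(
  <App />, // a component whose data is still pending at abort time
  webpackMap,
  {
    signal: controller.signal,
    onError(error) {
      errors.push(error);
    },
  },
);
controller.abort('boom');
const {prelude} = await pendingResult;
// Before this commit the aborted tasks were reported as errors, so
// errors === ['boom']; after it they are halted instead, so errors === []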
sebmarkbage authored Dec 10, 2024
1 parent 3b597c0 commit 4a8fc0f
Showing 5 changed files with 78 additions and 168 deletions.
@@ -2870,7 +2870,7 @@ describe('ReactFlightDOM', () => {
resolveGreeting();
const {prelude} = await pendingResult;

- expect(errors).toEqual(['boom']);
+ expect(errors).toEqual([]);

const preludeWeb = Readable.toWeb(prelude);
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
@@ -3032,7 +3032,7 @@ describe('ReactFlightDOM', () => {

const {prelude} = await pendingResult;

- expect(errors).toEqual(['boom']);
+ expect(errors).toEqual([]);

const preludeWeb = Readable.toWeb(prelude);
const response = ReactServerDOMClient.createFromReadableStream(preludeWeb);
@@ -2559,7 +2559,7 @@ describe('ReactFlightDOMBrowser', () => {
controller.abort('boom');
resolveGreeting();
const {prelude} = await pendingResult;
- expect(errors).toEqual(['boom']);
+ expect(errors).toEqual([]);

function ClientRoot({response}) {
return use(response);
@@ -1209,7 +1209,7 @@ describe('ReactFlightDOMEdge', () => {
resolveGreeting();
const {prelude} = await pendingResult;

- expect(errors).toEqual(['boom']);
+ expect(errors).toEqual([]);

function ClientRoot({response}) {
return use(response);
@@ -485,7 +485,7 @@ describe('ReactFlightDOMNode', () => {
controller.abort('boom');
resolveGreeting();
const {prelude} = await pendingResult;
- expect(errors).toEqual(['boom']);
+ expect(errors).toEqual([]);

function ClientRoot({response}) {
return use(response);
236 changes: 73 additions & 163 deletions packages/react-server/src/ReactFlightServer.js
@@ -629,21 +629,7 @@ function serializeThenable(
}
case 'rejected': {
const x = thenable.reason;
- if (
- enablePostpone &&
- typeof x === 'object' &&
- x !== null &&
- (x: any).$$typeof === REACT_POSTPONE_TYPE
- ) {
- const postponeInstance: Postpone = (x: any);
- logPostpone(request, postponeInstance.message, newTask);
- emitPostponeChunk(request, newTask.id, postponeInstance);
- } else {
- const digest = logRecoverableError(request, x, null);
- emitErrorChunk(request, newTask.id, digest, x);
- }
- newTask.status = ERRORED;
- request.abortableTasks.delete(newTask);
+ erroredTask(request, newTask, x);
return newTask.id;
}
default: {
@@ -698,21 +684,7 @@ function serializeThenable(
// We expect that the only status it might be otherwise is ABORTED.
// When we abort we emit chunks in each pending task slot and don't need
// to do so again here.
- if (
- enablePostpone &&
- typeof reason === 'object' &&
- reason !== null &&
- (reason: any).$$typeof === REACT_POSTPONE_TYPE
- ) {
- const postponeInstance: Postpone = (reason: any);
- logPostpone(request, postponeInstance.message, newTask);
- emitPostponeChunk(request, newTask.id, postponeInstance);
- } else {
- const digest = logRecoverableError(request, reason, newTask);
- emitErrorChunk(request, newTask.id, digest, reason);
- }
- newTask.status = ERRORED;
- request.abortableTasks.delete(newTask);
+ erroredTask(request, newTask, reason);
enqueueFlush(request);
}
},
@@ -795,8 +767,7 @@ function serializeReadableStream(
}
aborted = true;
request.abortListeners.delete(abortStream);
- const digest = logRecoverableError(request, reason, streamTask);
- emitErrorChunk(request, streamTask.id, digest, reason);
+ erroredTask(request, streamTask, reason);
enqueueFlush(request);

// $FlowFixMe should be able to pass mixed
@@ -808,30 +779,12 @@
}
aborted = true;
request.abortListeners.delete(abortStream);
- if (
- enablePostpone &&
- typeof reason === 'object' &&
- reason !== null &&
- (reason: any).$$typeof === REACT_POSTPONE_TYPE
- ) {
- const postponeInstance: Postpone = (reason: any);
- logPostpone(request, postponeInstance.message, streamTask);
- if (enableHalt && request.type === PRERENDER) {
- request.pendingChunks--;
- } else {
- emitPostponeChunk(request, streamTask.id, postponeInstance);
- enqueueFlush(request);
- }
+ if (enableHalt && request.type === PRERENDER) {
+ request.pendingChunks--;
} else {
- const digest = logRecoverableError(request, reason, streamTask);
- if (enableHalt && request.type === PRERENDER) {
- request.pendingChunks--;
- } else {
- emitErrorChunk(request, streamTask.id, digest, reason);
- enqueueFlush(request);
- }
+ erroredTask(request, streamTask, reason);
+ enqueueFlush(request);
}

// $FlowFixMe should be able to pass mixed
reader.cancel(reason).then(error, error);
}
Expand Down Expand Up @@ -937,8 +890,7 @@ function serializeAsyncIterable(
}
aborted = true;
request.abortListeners.delete(abortIterable);
- const digest = logRecoverableError(request, reason, streamTask);
- emitErrorChunk(request, streamTask.id, digest, reason);
+ erroredTask(request, streamTask, reason);
enqueueFlush(request);
if (typeof (iterator: any).throw === 'function') {
// The iterator protocol doesn't necessarily include this but a generator does.
@@ -952,28 +904,11 @@
}
aborted = true;
request.abortListeners.delete(abortIterable);
- if (
- enablePostpone &&
- typeof reason === 'object' &&
- reason !== null &&
- (reason: any).$$typeof === REACT_POSTPONE_TYPE
- ) {
- const postponeInstance: Postpone = (reason: any);
- logPostpone(request, postponeInstance.message, streamTask);
- if (enableHalt && request.type === PRERENDER) {
- request.pendingChunks--;
- } else {
- emitPostponeChunk(request, streamTask.id, postponeInstance);
- enqueueFlush(request);
- }
+ if (enableHalt && request.type === PRERENDER) {
+ request.pendingChunks--;
} else {
- const digest = logRecoverableError(request, reason, streamTask);
- if (enableHalt && request.type === PRERENDER) {
- request.pendingChunks--;
- } else {
- emitErrorChunk(request, streamTask.id, digest, reason);
- enqueueFlush(request);
- }
+ erroredTask(request, streamTask, reason);
+ enqueueFlush(request);
}
if (typeof (iterator: any).throw === 'function') {
// The iterator protocol doesn't necessarily include this but a generator does.
@@ -2281,8 +2216,7 @@ function serializeBlob(request: Request, blob: Blob): string {
}
aborted = true;
request.abortListeners.delete(abortBlob);
- const digest = logRecoverableError(request, reason, newTask);
- emitErrorChunk(request, newTask.id, digest, reason);
+ erroredTask(request, newTask, reason);
enqueueFlush(request);
// $FlowFixMe should be able to pass mixed
reader.cancel(reason).then(error, error);
@@ -2293,28 +2227,11 @@ function serializeBlob(request: Request, blob: Blob): string {
}
aborted = true;
request.abortListeners.delete(abortBlob);
- if (
- enablePostpone &&
- typeof reason === 'object' &&
- reason !== null &&
- (reason: any).$$typeof === REACT_POSTPONE_TYPE
- ) {
- const postponeInstance: Postpone = (reason: any);
- logPostpone(request, postponeInstance.message, newTask);
- if (enableHalt && request.type === PRERENDER) {
- request.pendingChunks--;
- } else {
- emitPostponeChunk(request, newTask.id, postponeInstance);
- enqueueFlush(request);
- }
+ if (enableHalt && request.type === PRERENDER) {
+ request.pendingChunks--;
} else {
- const digest = logRecoverableError(request, reason, newTask);
- if (enableHalt && request.type === PRERENDER) {
- request.pendingChunks--;
- } else {
- emitErrorChunk(request, newTask.id, digest, reason);
- enqueueFlush(request);
- }
+ erroredTask(request, newTask, reason);
+ enqueueFlush(request);
}
// $FlowFixMe should be able to pass mixed
reader.cancel(reason).then(error, error);
@@ -2414,24 +2331,6 @@ function renderModel(
return serializeLazyID(newTask.id);
}
return serializeByValueID(newTask.id);
- } else if (enablePostpone && x.$$typeof === REACT_POSTPONE_TYPE) {
- // Something postponed. We'll still send everything we have up until this point.
- // We'll replace this element with a lazy reference that postpones on the client.
- const postponeInstance: Postpone = (x: any);
- request.pendingChunks++;
- const postponeId = request.nextChunkId++;
- logPostpone(request, postponeInstance.message, task);
- emitPostponeChunk(request, postponeId, postponeInstance);
-
- // Restore the context. We assume that this will be restored by the inner
- // functions in case nothing throws so we don't use "finally" here.
- task.keyPath = prevKeyPath;
- task.implicitSlot = prevImplicitSlot;
-
- if (wasReactNode) {
- return serializeLazyID(postponeId);
- }
- return serializeByValueID(postponeId);
}
}

@@ -2443,8 +2342,21 @@
// Something errored. We'll still send everything we have up until this point.
request.pendingChunks++;
const errorId = request.nextChunkId++;
- const digest = logRecoverableError(request, x, task);
- emitErrorChunk(request, errorId, digest, x);
+ if (
+ enablePostpone &&
+ typeof x === 'object' &&
+ x !== null &&
+ x.$$typeof === REACT_POSTPONE_TYPE
+ ) {
+ // Something postponed. We'll still send everything we have up until this point.
+ // We'll replace this element with a lazy reference that postpones on the client.
+ const postponeInstance: Postpone = (x: any);
+ logPostpone(request, postponeInstance.message, task);
+ emitPostponeChunk(request, errorId, postponeInstance);
+ } else {
+ const digest = logRecoverableError(request, x, task);
+ emitErrorChunk(request, errorId, digest, x);
+ }
if (wasReactNode) {
// We'll replace this element with a lazy reference that throws on the client
// once it gets rendered.
@@ -3964,6 +3876,24 @@ function emitChunk(
emitModelChunk(request, task.id, json);
}

+ function erroredTask(request: Request, task: Task, error: mixed): void {
+ request.abortableTasks.delete(task);
+ task.status = ERRORED;
+ if (
+ enablePostpone &&
+ typeof error === 'object' &&
+ error !== null &&
+ error.$$typeof === REACT_POSTPONE_TYPE
+ ) {
+ const postponeInstance: Postpone = (error: any);
+ logPostpone(request, postponeInstance.message, task);
+ emitPostponeChunk(request, task.id, postponeInstance);
+ } else {
+ const digest = logRecoverableError(request, error, task);
+ emitErrorChunk(request, task.id, digest, error);
+ }
+ }
+
const emptyRoot = {};

function retryTask(request: Request, task: Task): void {
@@ -4083,20 +4013,9 @@ function retryTask(request: Request, task: Task): void {
const ping = task.ping;
x.then(ping, ping);
return;
- } else if (enablePostpone && x.$$typeof === REACT_POSTPONE_TYPE) {
- request.abortableTasks.delete(task);
- task.status = ERRORED;
- const postponeInstance: Postpone = (x: any);
- logPostpone(request, postponeInstance.message, task);
- emitPostponeChunk(request, task.id, postponeInstance);
- return;
}
}

- request.abortableTasks.delete(task);
- task.status = ERRORED;
- const digest = logRecoverableError(request, x, task);
- emitErrorChunk(request, task.id, digest, x);
+ erroredTask(request, task, x);
} finally {
if (__DEV__) {
debugID = prevDebugID;
@@ -4336,29 +4255,27 @@ export function abort(request: Request, reason: mixed): void {
}
const abortableTasks = request.abortableTasks;
if (abortableTasks.size > 0) {
- if (
+ if (enableHalt && request.type === PRERENDER) {
+ // When prerendering with halt semantics we simply halt the task
+ // and leave the reference unfulfilled.
+ abortableTasks.forEach(task => haltTask(task, request));
+ abortableTasks.clear();
+ } else if (
enablePostpone &&
typeof reason === 'object' &&
reason !== null &&
(reason: any).$$typeof === REACT_POSTPONE_TYPE
) {
const postponeInstance: Postpone = (reason: any);
logPostpone(request, postponeInstance.message, null);
- if (enableHalt && request.type === PRERENDER) {
- // When prerendering with halt semantics we simply halt the task
- // and leave the reference unfulfilled.
- abortableTasks.forEach(task => haltTask(task, request));
- abortableTasks.clear();
- } else {
- // When rendering we produce a shared postpone chunk and then
- // fulfill each task with a reference to that chunk.
- const errorId = request.nextChunkId++;
- request.fatalError = errorId;
- request.pendingChunks++;
- emitPostponeChunk(request, errorId, postponeInstance);
- abortableTasks.forEach(task => abortTask(task, request, errorId));
- abortableTasks.clear();
- }
+ // When rendering we produce a shared postpone chunk and then
+ // fulfill each task with a reference to that chunk.
+ const errorId = request.nextChunkId++;
+ request.fatalError = errorId;
+ request.pendingChunks++;
+ emitPostponeChunk(request, errorId, postponeInstance);
+ abortableTasks.forEach(task => abortTask(task, request, errorId));
+ abortableTasks.clear();
} else {
const error =
reason === undefined
Expand All @@ -4373,21 +4290,14 @@ export function abort(request: Request, reason: mixed): void {
)
: reason;
const digest = logRecoverableError(request, error, null);
- if (enableHalt && request.type === PRERENDER) {
- // When prerendering with halt semantics we simply halt the task
- // and leave the reference unfulfilled.
- abortableTasks.forEach(task => haltTask(task, request));
- abortableTasks.clear();
- } else {
- // When rendering we produce a shared error chunk and then
- // fulfill each task with a reference to that chunk.
- const errorId = request.nextChunkId++;
- request.fatalError = errorId;
- request.pendingChunks++;
- emitErrorChunk(request, errorId, digest, error);
- abortableTasks.forEach(task => abortTask(task, request, errorId));
- abortableTasks.clear();
- }
+ // When rendering we produce a shared error chunk and then
+ // fulfill each task with a reference to that chunk.
+ const errorId = request.nextChunkId++;
+ request.fatalError = errorId;
+ request.pendingChunks++;
+ emitErrorChunk(request, errorId, digest, error);
+ abortableTasks.forEach(task => abortTask(task, request, errorId));
+ abortableTasks.clear();
}
const onAllReady = request.onAllReady;
onAllReady();
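Taken together, thenables, readable streams, async iterables, blobs, and task retries now share one failure path: halting is decided first and skips logging entirely (no digest, no onError/onPostpone), while everything else funnels through erroredTask. A condensed sketch of that flow (simplified from the diff above; handleFailure is a made-up name, not the literal source):

// Simplified control flow shared by the abort/rejection handlers above.
function handleFailure(request, task, reason) {
  if (enableHalt && request.type === PRERENDER) {
    // Halt: release the reserved chunk and leave the reference unfulfilled.
    request.pendingChunks--;
  } else {
    // Error: erroredTask logs the postpone or error (producing a digest)
    // and emits the matching postpone/error chunk for this task's id.
    erroredTask(request, task, reason);
    enqueueFlush(request);
  }
}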
