restore next14 sync base branch (#31841)
The original commit for the Next.js 14 sync is 14898b6, but it was lost in the
facebook/react repo when the next nextjs-sync branch was created, so it is
being restored here.
gnoff authored Dec 18, 2024
1 parent 6c3b8db commit 1db6222
Showing 10 changed files with 77 additions and 94 deletions.
@@ -58,12 +58,6 @@ export function typedArrayToBinaryChunk(
throw new Error('Not implemented.');
}

export function clonePrecomputedChunk(
chunk: PrecomputedChunk,
): PrecomputedChunk {
return chunk;
}

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
throw new Error('Not implemented.');
}
7 changes: 2 additions & 5 deletions packages/react-dom-bindings/src/server/ReactFizzConfigDOM.js
@@ -48,7 +48,6 @@ import {
writeChunkAndReturn,
stringToChunk,
stringToPrecomputedChunk,
clonePrecomputedChunk,
} from 'react-server/src/ReactServerStreamConfig';
import {
resolveRequest,
@@ -4227,15 +4226,13 @@ export function writeCompletedBoundaryInstruction(
) {
resumableState.instructions |=
SentStyleInsertionFunction | SentCompleteBoundaryFunction;
writeChunk(
destination,
clonePrecomputedChunk(completeBoundaryWithStylesScript1FullBoth),
);
writeChunk(destination, completeBoundaryWithStylesScript1FullBoth);
} else if (
(resumableState.instructions & SentStyleInsertionFunction) ===
NothingSent
) {
resumableState.instructions |= SentStyleInsertionFunction;

writeChunk(destination, completeBoundaryWithStylesScript1FullPartial);
} else {
writeChunk(destination, completeBoundaryWithStylesScript1Partial);
59 changes: 59 additions & 0 deletions packages/react-dom/src/__tests__/ReactDOMFloat-test.js
@@ -731,6 +731,65 @@ describe('ReactDOMFloat', () => {
).toEqual(['<script src="src-of-external-runtime" async=""></script>']);
});

// @gate enableFloat
it('can send style insertion implementation independent of boundary completion instruction implementation', async () => {
await act(() => {
renderToPipeableStream(
<html>
<body>
<Suspense fallback="loading foo...">
<BlockedOn value="foo">foo</BlockedOn>
</Suspense>
<Suspense fallback="loading bar...">
<BlockedOn value="bar">
<link rel="stylesheet" href="bar" precedence="bar" />
bar
</BlockedOn>
</Suspense>
</body>
</html>,
).pipe(writable);
});

expect(getMeaningfulChildren(document)).toEqual(
<html>
<head />
<body>
{'loading foo...'}
{'loading bar...'}
</body>
</html>,
);

await act(() => {
resolveText('foo');
});
expect(getMeaningfulChildren(document)).toEqual(
<html>
<head />
<body>
foo
{'loading bar...'}
</body>
</html>,
);
await act(() => {
resolveText('bar');
});
expect(getMeaningfulChildren(document)).toEqual(
<html>
<head>
<link rel="stylesheet" href="bar" data-precedence="bar" />
</head>
<body>
foo
{'loading bar...'}
<link rel="preload" href="bar" as="style" />
</body>
</html>,
);
});

// @gate enableFloat
it('can avoid inserting a late stylesheet if it already rendered on the client', async () => {
await act(() => {
3 changes: 0 additions & 3 deletions packages/react-noop-renderer/src/ReactNoopFlightServer.js
@@ -46,9 +46,6 @@ const ReactNoopFlightServer = ReactFlightServer({
stringToPrecomputedChunk(content: string): Uint8Array {
return textEncoder.encode(content);
},
clonePrecomputedChunk(chunk: Uint8Array): Uint8Array {
return chunk;
},
isClientReference(reference: Object): boolean {
return reference.$$typeof === Symbol.for('react.client.reference');
},
29 changes: 6 additions & 23 deletions packages/react-server/src/ReactServerStreamConfigBrowser.js
@@ -22,7 +22,7 @@ export function flushBuffered(destination: Destination) {
// transform streams. https://github.com/whatwg/streams/issues/960
}

const VIEW_SIZE = 512;
const VIEW_SIZE = 2048;
let currentView = null;
let writtenBytes = 0;

@@ -40,15 +40,6 @@ export function writeChunk(
}

if (chunk.byteLength > VIEW_SIZE) {
if (__DEV__) {
if (precomputedChunkSet.has(chunk)) {
console.error(
'A large precomputed chunk was passed to writeChunk without being copied.' +
' Large chunks get enqueued directly and are not copied. This is incompatible with precomputed chunks because you cannot enqueue the same precomputed chunk twice.' +
' Use "cloneChunk" to make a copy of this large precomputed chunk before writing it. This is a bug in React.',
);
}
}
// this chunk may overflow a single view which implies it was not
// one that is cached by the streaming renderer. We will enqueue
// it directly and expect it is not re-used
@@ -120,15 +111,15 @@ export function stringToChunk(content: string): Chunk {
return textEncoder.encode(content);
}

const precomputedChunkSet: Set<Chunk | BinaryChunk> = __DEV__
? new Set()
: (null: any);

export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
const precomputedChunk = textEncoder.encode(content);

if (__DEV__) {
precomputedChunkSet.add(precomputedChunk);
if (precomputedChunk.byteLength > VIEW_SIZE) {
console.error(
'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
);
}
}

return precomputedChunk;
@@ -151,14 +142,6 @@ export function typedArrayToBinaryChunk(
return content.byteLength > VIEW_SIZE ? buffer.slice() : buffer;
}

export function clonePrecomputedChunk(
precomputedChunk: PrecomputedChunk,
): PrecomputedChunk {
return precomputedChunk.byteLength > VIEW_SIZE
? precomputedChunk.slice()
: precomputedChunk;
}

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
return chunk.byteLength;
}
6 changes: 0 additions & 6 deletions packages/react-server/src/ReactServerStreamConfigBun.js
@@ -70,12 +70,6 @@ export function typedArrayToBinaryChunk(
return content;
}

export function clonePrecomputedChunk(
chunk: PrecomputedChunk,
): PrecomputedChunk {
return chunk;
}

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
return Buffer.byteLength(chunk, 'utf8');
}
29 changes: 6 additions & 23 deletions packages/react-server/src/ReactServerStreamConfigEdge.js
@@ -22,7 +22,7 @@ export function flushBuffered(destination: Destination) {
// transform streams. https://github.com/whatwg/streams/issues/960
}

const VIEW_SIZE = 512;
const VIEW_SIZE = 2048;
let currentView = null;
let writtenBytes = 0;

@@ -40,15 +40,6 @@ export function writeChunk(
}

if (chunk.byteLength > VIEW_SIZE) {
if (__DEV__) {
if (precomputedChunkSet.has(chunk)) {
console.error(
'A large precomputed chunk was passed to writeChunk without being copied.' +
' Large chunks get enqueued directly and are not copied. This is incompatible with precomputed chunks because you cannot enqueue the same precomputed chunk twice.' +
' Use "cloneChunk" to make a copy of this large precomputed chunk before writing it. This is a bug in React.',
);
}
}
// this chunk may overflow a single view which implies it was not
// one that is cached by the streaming renderer. We will enqueue
// it directly and expect it is not re-used
@@ -120,15 +111,15 @@ export function stringToChunk(content: string): Chunk {
return textEncoder.encode(content);
}

const precomputedChunkSet: Set<Chunk | BinaryChunk> = __DEV__
? new Set()
: (null: any);

export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
const precomputedChunk = textEncoder.encode(content);

if (__DEV__) {
precomputedChunkSet.add(precomputedChunk);
if (precomputedChunk.byteLength > VIEW_SIZE) {
console.error(
'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
);
}
}

return precomputedChunk;
@@ -151,14 +142,6 @@ export function typedArrayToBinaryChunk(
return content.byteLength > VIEW_SIZE ? buffer.slice() : buffer;
}

export function clonePrecomputedChunk(
precomputedChunk: PrecomputedChunk,
): PrecomputedChunk {
return precomputedChunk.byteLength > VIEW_SIZE
? precomputedChunk.slice()
: precomputedChunk;
}

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
return chunk.byteLength;
}
6 changes: 0 additions & 6 deletions packages/react-server/src/ReactServerStreamConfigFB.js
@@ -60,12 +60,6 @@ export function typedArrayToBinaryChunk(
throw new Error('Not implemented.');
}

export function clonePrecomputedChunk(
chunk: PrecomputedChunk,
): PrecomputedChunk {
return chunk;
}

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
throw new Error('Not implemented.');
}
25 changes: 4 additions & 21 deletions packages/react-server/src/ReactServerStreamConfigNode.js
@@ -99,15 +99,6 @@ function writeViewChunk(
return;
}
if (chunk.byteLength > VIEW_SIZE) {
if (__DEV__) {
if (precomputedChunkSet && precomputedChunkSet.has(chunk)) {
console.error(
'A large precomputed chunk was passed to writeChunk without being copied.' +
' Large chunks get enqueued directly and are not copied. This is incompatible with precomputed chunks because you cannot enqueue the same precomputed chunk twice.' +
' Use "cloneChunk" to make a copy of this large precomputed chunk before writing it. This is a bug in React.',
);
}
}
// this chunk may overflow a single view which implies it was not
// one that is cached by the streaming renderer. We will enqueue
// it directly and expect it is not re-used
@@ -201,14 +192,14 @@ export function stringToChunk(content: string): Chunk {
return content;
}

const precomputedChunkSet = __DEV__ ? new Set<PrecomputedChunk>() : null;

export function stringToPrecomputedChunk(content: string): PrecomputedChunk {
const precomputedChunk = textEncoder.encode(content);

if (__DEV__) {
if (precomputedChunkSet) {
precomputedChunkSet.add(precomputedChunk);
if (precomputedChunk.byteLength > VIEW_SIZE) {
console.error(
'precomputed chunks must be smaller than the view size configured for this host. This is a bug in React.',
);
}
}

@@ -222,14 +213,6 @@ export function typedArrayToBinaryChunk(
return new Uint8Array(content.buffer, content.byteOffset, content.byteLength);
}

export function clonePrecomputedChunk(
precomputedChunk: PrecomputedChunk,
): PrecomputedChunk {
return precomputedChunk.length > VIEW_SIZE
? precomputedChunk.slice()
: precomputedChunk;
}

export function byteLengthOfChunk(chunk: Chunk | PrecomputedChunk): number {
return typeof chunk === 'string'
? Buffer.byteLength(chunk, 'utf8')
@@ -41,7 +41,6 @@ export const closeWithError = $$$config.closeWithError;
export const stringToChunk = $$$config.stringToChunk;
export const stringToPrecomputedChunk = $$$config.stringToPrecomputedChunk;
export const typedArrayToBinaryChunk = $$$config.typedArrayToBinaryChunk;
export const clonePrecomputedChunk = $$$config.clonePrecomputedChunk;
export const byteLengthOfChunk = $$$config.byteLengthOfChunk;
export const byteLengthOfBinaryChunk = $$$config.byteLengthOfBinaryChunk;
export const createFastHash = $$$config.createFastHash;
