allow to set chunk size per instance
vadimkibana committed Apr 23, 2024
1 parent 5f1dd38 commit 2b4bb1b
Showing 2 changed files with 12 additions and 16 deletions.
8 changes: 2 additions & 6 deletions x-pack/plugins/reporting/server/lib/content_stream.test.ts
@@ -280,9 +280,7 @@ describe('ContentStream', () => {
     });
 
     it('should split raw data into chunks', async () => {
-      client.cluster.getSettings.mockResponseOnce(
-        set<any>({}, 'defaults.http.max_content_length', 1028)
-      );
+      stream.chunkSize = 2;
       stream.end('123456');
       await new Promise((resolve) => stream.once('finish', resolve));

@@ -322,9 +320,7 @@ describe('ContentStream', () => {
     });
 
     it('should encode every chunk separately', async () => {
-      client.cluster.getSettings.mockResponseOnce(
-        set<any>({}, 'defaults.http.max_content_length', 1028)
-      );
+      base64Stream.chunkSize = 3;
       base64Stream.end('12345678');
       await new Promise((resolve) => base64Stream.once('finish', resolve));

20 changes: 10 additions & 10 deletions x-pack/plugins/reporting/server/lib/content_stream.ts
@@ -15,14 +15,6 @@ import type { ReportingCore } from '..';
 
 const ONE_MB = 1024 * 1024;
 
-/**
- * The chunking size of reporting files. Larger CSV files will be split into
- * multiple documents, where the stream is chunked into pieces of approximately
- * this size. The actual document size will be slightly larger due to Base64
- * encoding and JSON metadata.
- */
-const CHUNK_SIZE = 4 * ONE_MB;
-
 type Callback = (error?: Error) => void;
 type SearchRequest = estypes.SearchRequest;

@@ -70,6 +62,14 @@ export class ContentStream extends Duplex {
    */
   bytesWritten = 0;
 
+  /**
+   * The chunking size of reporting files. Larger CSV files will be split into
+   * multiple documents, where the stream is chunked into pieces of approximately
+   * this size. The actual document size will be slightly larger due to Base64
+   * encoding and JSON metadata.
+   */
+  chunkSize = 4 * ONE_MB;
+
   constructor(
     private client: ElasticsearchClient,
     private logger: Logger,
@@ -267,8 +267,8 @@
   }
 
   private async flushAllFullChunks() {
-    while (this.bytesBuffered >= CHUNK_SIZE && this.buffers.length) {
-      await this.flush(CHUNK_SIZE);
+    while (this.bytesBuffered >= this.chunkSize && this.buffers.length) {
+      await this.flush(this.chunkSize);
     }
   }

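In short, the chunk size moves from the removed module-level CHUNK_SIZE constant to a writable chunkSize field on each ContentStream instance (still defaulting to 4 MB), so callers such as the updated unit tests can tune chunking per instance. Below is a minimal usage sketch of that change; the createContentStream helper and its elided arguments are illustrative assumptions, not part of this commit.

import { ContentStream } from './content_stream';

// Hypothetical helper standing in for however a caller obtains a stream; the
// real constructor arguments (Elasticsearch client, logger, document info)
// are not shown in this diff, so they are elided here.
declare function createContentStream(): ContentStream;

const stream = createContentStream();

// The default stays at 4 * ONE_MB, matching the removed CHUNK_SIZE constant,
// but it can now be overridden per instance, as the tests above do.
stream.chunkSize = 2;

// With a 2-byte chunk size, '123456' should be flushed as three documents
// ('12', '34', '56'), which is what the 'should split raw data into chunks'
// test exercises.
stream.end('123456');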
