forked from opensearch-project/OpenSearch-Dashboards
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[OSD Availability] Prevent OSD process crashes when disk is full (opensearch-project#6733)
* prevent crash when disk full Signed-off-by: Flyingliuhub <[email protected]> * change verbose to false Signed-off-by: Flyingliuhub <[email protected]> * add changeset file Signed-off-by: Flyingliuhub <[email protected]> * update changeset contexts Signed-off-by: Flyingliuhub <[email protected]> * change feature flag name Signed-off-by: Flyingliuhub <[email protected]> --------- Signed-off-by: Flyingliuhub <[email protected]> Co-authored-by: ZilongX <[email protected]>
- Loading branch information
1 parent
60f9d05
commit 45c7c15
Showing
7 changed files
with
179 additions
and
5 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,2 @@ | ||
fix: | ||
- [OSD Availability] Prevent OSD process crashes when disk is full ([#6733](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6733)) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,148 @@ | ||
/* | ||
* Copyright OpenSearch Contributors | ||
* SPDX-License-Identifier: Apache-2.0 | ||
*/ | ||
|
||
import os from 'os'; | ||
import path from 'path'; | ||
import fs from 'fs'; | ||
import stripAnsi from 'strip-ansi'; | ||
import { getLoggerStream, onFinished } from './log_reporter'; | ||
|
||
/**
 * Resolve after `ms` milliseconds. Used to give the async logging pipeline
 * time to flush its output before the tests make assertions.
 *
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>} resolves once the timer fires
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
||
describe('getLoggerStream', () => {
  /**
   * Run `fn` with `process.stdout.write` replaced by a collector and return
   * the raw chunks that were written. The original `write` is restored in a
   * `finally` block, so a failing assertion or a throwing pipeline cannot
   * leave stdout hijacked for every subsequent test (the original tests only
   * restored it on the success path).
   *
   * @param {() => Promise<void>} fn - async body that produces stdout output
   * @returns {Promise<string[]>} raw chunks written while `fn` ran
   */
  const captureStdout = async (fn) => {
    const chunks = [];
    const origWrite = process.stdout.write;
    process.stdout.write = (buffer) => {
      chunks.push(buffer.toString());
      return true;
    };
    try {
      await fn();
    } finally {
      process.stdout.write = origWrite;
    }
    return chunks;
  };

  // Unique per-test log destination under the OS temp dir. The random suffix
  // avoids collisions when two tests start within the same millisecond
  // (`dest-${Date.now()}.log` alone is not unique).
  const makeTmpDest = () =>
    path.join(os.tmpdir(), `dest-${Date.now()}-${Math.random().toString(36).slice(2)}.log`);

  // Best-effort removal of a temp log file so tests do not leak files into
  // the OS temp dir; ignores the case where the file was never created.
  const removeTmpDest = (dest) => {
    try {
      fs.unlinkSync(dest);
    } catch (err) {
      // File may not exist if the test failed before anything was logged.
    }
  };

  it('should log to stdout when the json config is set to false', async () => {
    const chunks = await captureStdout(async () => {
      const loggerStream = getLoggerStream({
        config: {
          json: false,
          dest: 'stdout',
          filter: {},
        },
        events: { log: '*' },
      });
      loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
      // Give the async logging pipeline time to flush before asserting.
      await sleep(500);
    });

    const lines = chunks.map((chunk) => stripAnsi(chunk).trim());
    expect(lines.length).toBe(1);
    expect(lines[0]).toMatch(/^log \[[^\]]*\] \[foo\] test data$/);
  });

  it('should log to stdout when the json config is set to true', async () => {
    const chunks = await captureStdout(async () => {
      const loggerStream = getLoggerStream({
        config: {
          json: true,
          dest: 'stdout',
          filter: {},
        },
        events: { log: '*' },
      });
      loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
      await sleep(500);
    });

    const lines = chunks.map((chunk) => JSON.parse(chunk.trim()));
    expect(lines.length).toBe(1);
    expect(lines[0]).toMatchObject({
      type: 'log',
      tags: ['foo'],
      message: 'test data',
    });
  });

  it('should log to custom file when the json config is set to false', async () => {
    const dest = makeTmpDest();
    try {
      const loggerStream = getLoggerStream({
        config: {
          json: false,
          dest,
          filter: {},
        },
        events: { log: '*' },
      });
      loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
      await sleep(500);

      const lines = stripAnsi(fs.readFileSync(dest, { encoding: 'utf8' }))
        .trim()
        .split(os.EOL);
      expect(lines.length).toBe(1);
      expect(lines[0]).toMatch(/^log \[[^\]]*\] \[foo\] test data$/);
    } finally {
      removeTmpDest(dest);
    }
  });

  it('should log to custom file when the json config is set to true and ignoreEnospcError', async () => {
    const dest = makeTmpDest();
    try {
      const loggerStream = getLoggerStream({
        config: {
          json: true,
          dest,
          ignoreEnospcError: true,
          filter: {},
        },
        events: { log: '*' },
      });
      loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
      await sleep(500);

      const lines = fs
        .readFileSync(dest, { encoding: 'utf8' })
        .trim()
        .split(os.EOL)
        .map((line) => JSON.parse(line));
      expect(lines.length).toBe(1);
      expect(lines[0]).toMatchObject({
        type: 'log',
        tags: ['foo'],
        message: 'test data',
      });
    } finally {
      removeTmpDest(dest);
    }
  });

  it('should handle ENOSPC error when disk full', () => {
    const error = { code: 'ENOSPC', stack: 'Error stack trace' };
    const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();

    try {
      // ENOSPC must be swallowed (logged, not thrown) so a full disk does
      // not crash the OSD process.
      expect(() => {
        onFinished(error);
      }).not.toThrow();

      expect(consoleErrorSpy).toHaveBeenCalledWith(
        'Error in logging pipeline:',
        'Error stack trace'
      );
    } finally {
      // Restore even on assertion failure so console.error is not left mocked.
      consoleErrorSpy.mockRestore();
    }
  });

  it('should throw error for non-ENOSPC error', () => {
    const error = { message: 'non-ENOSPC error', code: 'OTHER', stack: 'Error stack trace' };

    // Any other error must still propagate so real failures are not hidden.
    expect(() => {
      onFinished(error);
    }).toThrowError('non-ENOSPC error');
  });
});