false}
- />
- );
-
- expect(getFieldSelectComboBox(wrapper).prop('options')!).toHaveLength(0);
+ renderDimensionPanel({
+ columnId: 'col2',
+ filterOperations: () => false,
+ });
+ userEvent.click(screen.getByRole('button', { name: /open list of options/i }));
+ expect(screen.getByText(/There aren't any options available/)).toBeInTheDocument();
});
it('should list all field names and document as a whole in prioritized order', () => {
- wrapper = mountWithServices();
-
- const options = getFieldSelectComboBox(wrapper).prop('options');
+ const { getVisibleFieldSelectOptions } = renderDimensionPanel();
- expect(options).toHaveLength(3);
+ const comboBoxButton = screen.getAllByRole('button', { name: /open list of options/i })[0];
+ const comboBoxInput = screen.getAllByTestId('comboBoxSearchInput')[0];
+ userEvent.click(comboBoxButton);
- expect(options![0].label).toEqual('Records');
- expect(options![1].options!.map(({ label }) => label)).toEqual([
+ const allOptions = [
+ 'Records',
'timestampLabel',
'bytes',
'memory',
'source',
- ]);
+ // these fields are generated to test the issue #148062 about fields that are using JS Object method names
+ ...Object.getOwnPropertyNames(Object.getPrototypeOf({})).sort(),
+ ];
+ expect(allOptions.slice(0, 7)).toEqual(getVisibleFieldSelectOptions());
- // these fields are generated to test the issue #148062 about fields that are using JS Object method names
- expect(options![2].options!.map(({ label }) => label)).toEqual(
- Object.getOwnPropertyNames(Object.getPrototypeOf({})).sort()
- );
+ // keep hitting arrow down to scroll to the next options (react-window only renders visible options)
+ userEvent.type(comboBoxInput, '{ArrowDown}'.repeat(12));
+
+ expect(getVisibleFieldSelectOptions()).toEqual(allOptions.slice(5, 16));
+
+ // press again to go back to the beginning
+ userEvent.type(comboBoxInput, '{ArrowDown}');
+ expect(getVisibleFieldSelectOptions()).toEqual(allOptions.slice(0, 9));
});
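(Aside on the arrow-key trick above: react-window only mounts the option rows currently in view, so a test can observe "scrolling" simply by re-reading the mounted nodes. A minimal sketch of such a helper, assuming EUI's `comboBoxOptionsList` test subject and an already rendered and opened combo box — only `comboBoxSearchInput` is taken from the test itself:)

import { screen, within } from '@testing-library/react';
import userEvent from '@testing-library/user-event';

// Read whatever option rows react-window has mounted at this moment.
const getVisibleOptions = (): Array<string | null> =>
  within(screen.getByTestId('comboBoxOptionsList'))
    .getAllByRole('option')
    .map((option) => option.textContent);

// jsdom cannot really scroll, so page through the virtualized list with the keyboard:
// each ArrowDown moves the highlight and forces react-window to mount the next slice of options.
userEvent.type(screen.getByTestId('comboBoxSearchInput'), '{ArrowDown}'.repeat(12));
const visibleAfterPaging = getVisibleOptions();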
it('should hide fields that have no data', () => {
@@ -1783,7 +1804,11 @@ describe('FormBasedDimensionEditor', () => {
});
expect(setState.mock.calls[0]).toEqual([expect.any(Function), { isDimensionComplete: true }]);
- expect(setState.mock.calls[0][0](props.state)).toEqual({
+ let newState = props.state;
+ act(() => {
+ newState = setState.mock.calls[0][0](props.state);
+ });
+ expect(newState).toEqual({
...props.state,
layers: {
first: {
diff --git a/x-pack/plugins/lens/public/datasources/form_based/utils.test.tsx b/x-pack/plugins/lens/public/datasources/form_based/utils.test.tsx
index 7d7b2d9b59f83..3302aac2a5579 100644
--- a/x-pack/plugins/lens/public/datasources/form_based/utils.test.tsx
+++ b/x-pack/plugins/lens/public/datasources/form_based/utils.test.tsx
@@ -310,7 +310,11 @@ describe('indexpattern_datasource utils', () => {
]),
[
`${rootId}X${formulaParts.length}`,
- { operationType: 'math', references: formulaParts.map((_, i) => `${rootId}X${i}`) },
+ {
+ operationType: 'math',
+ references: formulaParts.map((_, i) => `${rootId}X${i}`),
+ label: 'Part of formula',
+ },
],
]);
}
diff --git a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/data_panel_wrapper.tsx b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/data_panel_wrapper.tsx
index b9ad0df04aaa3..da78db7ed0bc6 100644
--- a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/data_panel_wrapper.tsx
+++ b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/data_panel_wrapper.tsx
@@ -194,7 +194,9 @@ export const DataPanelWrapper = memo((props: DataPanelWrapperProps) => {
<>
{DataPanelComponent && (
-          {DataPanelComponent(datasourceProps)}
+          <div data-test-subj="lnsDataPanelWrapper">
+            {DataPanelComponent(datasourceProps)}
+          </div>
)}
    </>
);
diff --git a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/editor_frame.test.tsx b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/editor_frame.test.tsx
index 7cd48d6beb03d..d99f6418870fa 100644
--- a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/editor_frame.test.tsx
+++ b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/editor_frame.test.tsx
@@ -7,31 +7,18 @@
import React, { useEffect } from 'react';
import { ReactWrapper } from 'enzyme';
-import faker from 'faker';
-
-// Tests are executed in a jsdom environment who does not have sizing methods,
-// thus the AutoSizer will always compute a 0x0 size space
-// Mock the AutoSizer inside EuiSelectable (Chart Switch) and return some dimensions > 0
-jest.mock('react-virtualized-auto-sizer', () => {
- return function (props: {
- children: (dimensions: { width: number; height: number }) => React.ReactNode;
- disableHeight?: boolean;
- }) {
- const { children, disableHeight, ...otherProps } = props;
- return (
- // js-dom may complain that a non-DOM attributes are used when appending props
- // Handle the disableHeight case using native DOM styling
-      <div style={{ width: 100, height: disableHeight ? 0 : 100 }} {...otherProps}>
-        {children({ width: 100, height: 100 })}
-      </div>
- );
- };
-});
+import { screen, fireEvent, within, waitFor } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
-import { EuiPanel, EuiToolTip } from '@elastic/eui';
import { EditorFrame, EditorFrameProps } from './editor_frame';
-import { DatasourcePublicAPI, DatasourceSuggestion, Visualization } from '../../types';
-import { act } from 'react-dom/test-utils';
+import {
+ DatasourceMap,
+ DatasourcePublicAPI,
+ DatasourceSuggestion,
+ Visualization,
+ VisualizationMap,
+} from '../../types';
+import { act } from '@testing-library/react';
import { coreMock } from '@kbn/core/public/mocks';
import {
createMockVisualization,
@@ -42,15 +29,13 @@ import {
renderWithReduxStore,
} from '../../mocks';
import { inspectorPluginMock } from '@kbn/inspector-plugin/public/mocks';
-import { ReactExpressionRendererType } from '@kbn/expressions-plugin/public';
import { DragDrop, useDragDropContext } from '@kbn/dom-drag-drop';
import { uiActionsPluginMock } from '@kbn/ui-actions-plugin/public/mocks';
import { chartPluginMock } from '@kbn/charts-plugin/public/mocks';
import { expressionsPluginMock } from '@kbn/expressions-plugin/public/mocks';
import { mockDataPlugin, mountWithProvider } from '../../mocks';
-import { setState } from '../../state_management';
+import { LensAppState, setState } from '../../state_management';
import { getLensInspectorService } from '../../lens_inspector_service';
-import { toExpression } from '@kbn/interpreter';
import { createIndexPatternServiceMock } from '../../mocks/data_views_service_mock';
import { dataViewPluginMocks } from '@kbn/data-views-plugin/public/mocks';
import { EventAnnotationServiceType } from '@kbn/event-annotation-plugin/public';
@@ -105,6 +90,7 @@ function getDefaultProps() {
indexPatternService: createIndexPatternServiceMock(),
getUserMessages: () => [],
addUserMessages: () => () => {},
+ ExpressionRenderer: createExpressionRendererMock(),
};
return defaultProps;
}
@@ -116,207 +102,172 @@ describe('editor_frame', () => {
  let mockVisualization2: jest.Mocked<Visualization>;
let mockDatasource2: DatasourceMock;
- let expressionRendererMock: ReactExpressionRendererType;
+ let visualizationMap: VisualizationMap;
+ let datasourceMap: DatasourceMap;
beforeEach(() => {
- mockVisualization = {
- ...createMockVisualization(),
- id: 'testVis',
- visualizationTypes: [
- {
- icon: 'empty',
- id: 'testVis',
- label: faker.lorem.word(),
- groupLabel: 'testVisGroup',
- },
- ],
- };
- mockVisualization2 = {
- ...createMockVisualization(),
- id: 'testVis2',
- visualizationTypes: [
- {
- icon: 'empty',
- id: 'testVis2',
- label: 'TEST2',
- groupLabel: 'testVis2Group',
- },
- ],
- };
-
- mockVisualization.getLayerIds.mockReturnValue(['first']);
- mockVisualization2.getLayerIds.mockReturnValue(['second']);
+ mockVisualization = createMockVisualization();
+ mockVisualization2 = createMockVisualization('testVis2', ['second']);
mockDatasource = createMockDatasource();
mockDatasource2 = createMockDatasource('testDatasource2');
+ mockDatasource.getLayers.mockReturnValue(['first']);
+ mockDatasource.getDatasourceSuggestionsFromCurrentState.mockReturnValue([
+ {
+ state: {},
+ table: {
+ columns: [],
+ isMultiRow: true,
+ layerId: 'first',
+ changeType: 'unchanged',
+ },
+ keptLayerIds: [],
+ },
+ ]);
+
+ visualizationMap = {
+ testVis: mockVisualization,
+ testVis2: mockVisualization2,
+ };
- expressionRendererMock = createExpressionRendererMock();
+ datasourceMap = {
+ testDatasource: mockDatasource,
+ testDatasource2: mockDatasource2,
+ };
});
- describe('initialization', () => {
- it('should not render something before all datasources are initialized', async () => {
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: mockVisualization,
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- },
-
- ExpressionRenderer: expressionRendererMock,
- };
-      const { lensStore } = await mountWithProvider(<EditorFrame {...props} />, {
+ const renderEditorFrame = (
+    propsOverrides: Partial<EditorFrameProps> = {},
+    { preloadedStateOverrides }: { preloadedStateOverrides: Partial<LensAppState> } = {
+ preloadedStateOverrides: {},
+ }
+ ) => {
+ const { store, ...rtlRender } = renderWithReduxStore(
+      <EditorFrame
+        {...getDefaultProps()}
+        visualizationMap={visualizationMap}
+        datasourceMap={datasourceMap}
+        {...propsOverrides}
+      />,
+ {},
+ {
preloadedState: {
activeDatasourceId: 'testDatasource',
- datasourceStates: {
- testDatasource: {
- isLoading: true,
- state: {
- internalState1: '',
- },
- },
- },
- },
- });
- expect(mockDatasource.DataPanelComponent).not.toHaveBeenCalled();
- lensStore.dispatch(
- setState({
+ visualization: { activeId: mockVisualization.id, state: 'initialState' },
datasourceStates: {
testDatasource: {
isLoading: false,
state: {
- internalState1: '',
+ internalState: 'datasourceState',
},
},
},
- })
- );
- expect(mockDatasource.DataPanelComponent).toHaveBeenCalled();
- });
-
- it('should initialize visualization state and render config panel', async () => {
- const initialState = {};
- mockDatasource.getLayers.mockReturnValue(['first']);
-
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: { ...mockVisualization, initialize: () => initialState },
- },
- datasourceMap: {
- testDatasource: {
- ...mockDatasource,
- initialize: () => Promise.resolve(),
- },
+ ...preloadedStateOverrides,
},
+ storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
+ }
+ );
- ExpressionRenderer: expressionRendererMock,
- };
+ const openChartSwitch = () => {
+ userEvent.click(screen.getByTestId('lnsChartSwitchPopover'));
+ };
-      await mountWithProvider(<EditorFrame {...props} />, {
- preloadedState: {
- visualization: { activeId: 'testVis', state: initialState },
- },
+ const waitForChartSwitchClosed = () => {
+ waitFor(() => {
+ expect(screen.queryByTestId('lnsChartSwitchList')).not.toBeInTheDocument();
});
+ };
- expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
- expect.objectContaining({ state: initialState })
- );
- });
-
- let instance: ReactWrapper;
-
- it('should render the resulting expression using the expression renderer', async () => {
- mockDatasource.getLayers.mockReturnValue(['first']);
-
- const props: EditorFrameProps = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: {
- ...mockVisualization,
- toExpression: (state, datasourceLayers, attrs, datasourceExpressionsByLayers = {}) =>
- toExpression({
- type: 'expression',
- chain: [
- ...(datasourceExpressionsByLayers.first?.chain ?? []),
- { type: 'function', function: 'testVis', arguments: {} },
- ],
- }),
- },
- },
- datasourceMap: {
- testDatasource: {
- ...mockDatasource,
- toExpression: () => 'datasource',
- },
- },
+ const getMenuItem = (subType: string) => {
+ const list = screen.getByTestId('lnsChartSwitchList');
+ return within(list).getByTestId(`lnsChartSwitchPopover_${subType}`);
+ };
- ExpressionRenderer: expressionRendererMock,
- };
- instance = (
-        await mountWithProvider(<EditorFrame {...props} />, {
- preloadedState: {
- visualization: { activeId: 'testVis', state: {} },
+ const switchToVis = (subType: string) => {
+ fireEvent.click(getMenuItem(subType));
+ };
+ const queryLayerPanel = () => screen.queryByTestId('lns-layerPanel-0');
+ const queryWorkspacePanel = () => screen.queryByTestId('lnsWorkspace');
+ const queryDataPanel = () => screen.queryByTestId('lnsDataPanelWrapper');
+
+ return {
+ ...rtlRender,
+ store,
+ switchToVis,
+ getMenuItem,
+ openChartSwitch,
+ queryLayerPanel,
+ queryWorkspacePanel,
+ queryDataPanel,
+ waitForChartSwitchClosed,
+ simulateLoadingDatasource: () =>
+ store.dispatch(
+ setState({
datasourceStates: {
testDatasource: {
isLoading: false,
state: {
- internalState1: '',
+ internalState: 'datasourceState',
+ },
+ },
+ },
+ })
+ ),
+ };
+ };
+
+ describe('initialization', () => {
+ it('should render workspace panel, data panel and layer panel when all datasources are initialized', async () => {
+ const { queryWorkspacePanel, queryDataPanel, queryLayerPanel, simulateLoadingDatasource } =
+ renderEditorFrame(undefined, {
+ preloadedStateOverrides: {
+ datasourceStates: {
+ testDatasource: {
+ isLoading: true,
+ state: {
+ internalState: 'datasourceState',
},
},
},
},
- })
- ).instance;
+ });
- instance.update();
+ expect(mockVisualization.getConfiguration).not.toHaveBeenCalled();
+ expect(queryWorkspacePanel()).not.toBeInTheDocument();
+ expect(queryDataPanel()).not.toBeInTheDocument();
+ expect(queryLayerPanel()).not.toBeInTheDocument();
- expect(instance.find(expressionRendererMock).prop('expression')).toMatchInlineSnapshot(`
- "datasource
- | testVis"
- `);
+ simulateLoadingDatasource();
+ expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
+ expect.objectContaining({ state: 'initialState' })
+ );
+
+ expect(queryWorkspacePanel()).toBeInTheDocument();
+ expect(queryDataPanel()).toBeInTheDocument();
+ expect(queryLayerPanel()).toBeInTheDocument();
+ });
+ it('should render the resulting expression using the expression renderer', async () => {
+ renderEditorFrame();
+ expect(screen.getByTestId('lnsExpressionRenderer')).toHaveTextContent(
+ 'datasource_expression | testVis'
+ );
});
});
describe('state update', () => {
it('should re-render config panel after state update', async () => {
- mockDatasource.getLayers.mockReturnValue(['first']);
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: { ...mockVisualization, toExpression: () => null },
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- },
+ const { store } = renderEditorFrame();
+ const updatedState = 'updatedVisState';
- ExpressionRenderer: expressionRendererMock,
- };
- renderWithReduxStore(
-        <EditorFrame {...props} />,
- {},
- {
- preloadedState: {
- activeDatasourceId: 'testDatasource',
- visualization: { activeId: mockVisualization.id, state: {} },
- datasourceStates: {
- testDatasource: {
- isLoading: false,
- state: '',
- },
- },
+ store.dispatch(
+ setState({
+ visualization: {
+ activeId: mockVisualization.id,
+ state: updatedState,
},
- }
+ })
);
- const updatedState = {};
- const setDatasourceState = (mockDatasource.DataPanelComponent as jest.Mock).mock.calls[0][0]
- .setState;
- act(() => {
- setDatasourceState(updatedState);
- });
-
expect(mockVisualization.getConfiguration).toHaveBeenCalledTimes(3);
expect(mockVisualization.getConfiguration).toHaveBeenLastCalledWith(
expect.objectContaining({
@@ -326,20 +277,7 @@ describe('editor_frame', () => {
});
it('should re-render data panel after state update', async () => {
- mockDatasource.getLayers.mockReturnValue(['first']);
-
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: mockVisualization,
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- },
-
- ExpressionRenderer: expressionRendererMock,
- };
-      await mountWithProvider(<EditorFrame {...props} />);
+ renderEditorFrame();
const setDatasourceState = (mockDatasource.DataPanelComponent as jest.Mock).mock.calls[0][0]
.setState;
@@ -349,9 +287,8 @@ describe('editor_frame', () => {
const updatedState = {
title: 'shazm',
};
- act(() => {
- setDatasourceState(updatedState);
- });
+
+ setDatasourceState(updatedState);
expect(mockDatasource.DataPanelComponent).toHaveBeenCalledTimes(1);
expect(mockDatasource.DataPanelComponent).toHaveBeenLastCalledWith(
@@ -362,21 +299,7 @@ describe('editor_frame', () => {
});
it('should re-render config panel with updated datasource api after datasource state update', async () => {
- mockDatasource.getLayers.mockReturnValue(['first']);
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: mockVisualization,
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- },
-
- ExpressionRenderer: expressionRendererMock,
- };
-      await mountWithProvider(<EditorFrame {...props} />, {
- preloadedState: { visualization: { activeId: mockVisualization.id, state: {} } },
- });
+ renderEditorFrame();
const updatedPublicAPI: DatasourcePublicAPI = {
datasourceId: 'testDatasource',
@@ -394,9 +317,8 @@ describe('editor_frame', () => {
const setDatasourceState = (mockDatasource.DataPanelComponent as jest.Mock).mock.calls[0][0]
.setState;
- act(() => {
- setDatasourceState('newState');
- });
+
+ setDatasourceState('newState');
expect(mockVisualization.getConfiguration).toHaveBeenCalledTimes(1);
expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
@@ -413,34 +335,10 @@ describe('editor_frame', () => {
describe('datasource public api communication', () => {
it('should give access to the datasource state in the datasource factory function', async () => {
- const datasourceState = {};
- mockDatasource.initialize.mockReturnValue(datasourceState);
- mockDatasource.getLayers.mockReturnValue(['first']);
-
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: mockVisualization,
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- },
-
- ExpressionRenderer: expressionRendererMock,
- };
-      await mountWithProvider(<EditorFrame {...props} />, {
- preloadedState: {
- datasourceStates: {
- testDatasource: {
- isLoading: false,
- state: {},
- },
- },
- },
- });
+ renderEditorFrame();
expect(mockDatasource.getPublicAPI).toHaveBeenCalledWith({
- state: datasourceState,
+ state: { internalState: 'datasourceState' },
layerId: 'first',
indexPatterns: {},
});
@@ -448,75 +346,15 @@ describe('editor_frame', () => {
});
describe('switching', () => {
- let instance: ReactWrapper;
-
- function switchTo(subType: string) {
- act(() => {
- instance.find('[data-test-subj="lnsChartSwitchPopover"]').last().simulate('click');
- });
-
- instance.update();
-
- act(() => {
- instance
- .find(`[data-test-subj="lnsChartSwitchPopover_${subType}"]`)
- .last()
- .simulate('click');
- });
- }
-
- beforeEach(async () => {
- mockVisualization2.initialize.mockReturnValue({ initial: true });
- mockDatasource.getLayers.mockReturnValue(['first', 'second']);
- mockDatasource.getDatasourceSuggestionsFromCurrentState.mockReturnValue([
- {
- state: {},
- table: {
- columns: [],
- isMultiRow: true,
- layerId: 'first',
- changeType: 'unchanged',
- },
- keptLayerIds: [],
- },
- ]);
-
- const visualizationMap = {
- testVis: mockVisualization,
- testVis2: mockVisualization2,
- };
-
- const datasourceMap = {
- testDatasource: mockDatasource,
- testDatasource2: mockDatasource2,
- };
-
- const props = {
- ...getDefaultProps(),
- visualizationMap,
- datasourceMap,
- ExpressionRenderer: expressionRendererMock,
- };
- instance = (
-        await mountWithProvider(<EditorFrame {...props} />, {
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- })
- ).instance;
-
- // necessary to flush elements to dom synchronously
- instance.update();
- });
-
- afterEach(() => {
- instance.unmount();
- });
-
it('should initialize other visualization on switch', async () => {
- switchTo('testVis2');
+ const { openChartSwitch, switchToVis } = renderEditorFrame();
+ openChartSwitch();
+ switchToVis('testVis2');
expect(mockVisualization2.initialize).toHaveBeenCalled();
});
it('should use suggestions to switch to new visualization', async () => {
+ const { openChartSwitch, switchToVis } = renderEditorFrame();
const initialState = { suggested: true };
mockVisualization2.initialize.mockReturnValueOnce({ initial: true });
mockVisualization2.getVisualizationTypeId.mockReturnValueOnce('testVis2');
@@ -528,9 +366,8 @@ describe('editor_frame', () => {
previewIcon: 'empty',
},
]);
-
- switchTo('testVis2');
-
+ openChartSwitch();
+ switchToVis('testVis2');
expect(mockVisualization2.getSuggestions).toHaveBeenCalled();
expect(mockVisualization2.initialize).toHaveBeenCalledWith(expect.anything(), initialState);
expect(mockVisualization2.getConfiguration).toHaveBeenCalledWith(
@@ -541,7 +378,9 @@ describe('editor_frame', () => {
it('should fall back when switching visualizations if the visualization has no suggested use', async () => {
mockVisualization2.initialize.mockReturnValueOnce({ initial: true });
- switchTo('testVis2');
+ const { openChartSwitch, switchToVis, waitForChartSwitchClosed } = renderEditorFrame();
+ openChartSwitch();
+ switchToVis('testVis2');
expect(mockDatasource.publicAPIMock.getTableSpec).toHaveBeenCalled();
expect(mockVisualization2.getSuggestions).toHaveBeenCalled();
@@ -553,131 +392,80 @@ describe('editor_frame', () => {
expect(mockVisualization2.getConfiguration).toHaveBeenCalledWith(
expect.objectContaining({ state: { initial: true } })
);
+ waitForChartSwitchClosed();
});
});
describe('suggestions', () => {
it('should fetch suggestions of currently active datasource', async () => {
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: mockVisualization,
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- testDatasource2: mockDatasource2,
- },
-
- ExpressionRenderer: expressionRendererMock,
- };
-      await mountWithProvider(<EditorFrame {...props} />);
-
+ renderEditorFrame();
expect(mockDatasource.getDatasourceSuggestionsFromCurrentState).toHaveBeenCalled();
expect(mockDatasource2.getDatasourceSuggestionsFromCurrentState).not.toHaveBeenCalled();
});
it('should fetch suggestions of all visualizations', async () => {
- mockDatasource.getDatasourceSuggestionsFromCurrentState.mockReturnValue([
- {
- state: {},
- table: {
- changeType: 'unchanged',
- columns: [],
- isMultiRow: true,
- layerId: 'first',
- },
- keptLayerIds: [],
- },
- ]);
-
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: mockVisualization,
- testVis2: mockVisualization2,
- },
- datasourceMap: {
- testDatasource: mockDatasource,
- testDatasource2: mockDatasource2,
- },
-
- ExpressionRenderer: expressionRendererMock,
- };
-      await mountWithProvider(<EditorFrame {...props} />);
+ renderEditorFrame();
expect(mockVisualization.getSuggestions).toHaveBeenCalled();
expect(mockVisualization2.getSuggestions).toHaveBeenCalled();
});
- let instance: ReactWrapper;
it('should display top 5 suggestions in descending order', async () => {
- mockDatasource.getLayers.mockReturnValue(['first']);
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: {
- ...mockVisualization,
- getSuggestions: () => [
- {
- score: 0.1,
- state: {},
- title: 'Suggestion6',
- previewIcon: 'empty',
- },
- {
- score: 0.5,
- state: {},
- title: 'Suggestion3',
- previewIcon: 'empty',
- },
- {
- score: 0.7,
- state: {},
- title: 'Suggestion2',
- previewIcon: 'empty',
- },
- {
- score: 0.8,
- state: {},
- title: 'Suggestion1',
- previewIcon: 'empty',
- },
- ],
- },
- testVis2: {
- ...mockVisualization,
- getSuggestions: () => [
- {
- score: 0.4,
- state: {},
- title: 'Suggestion5',
- previewIcon: 'empty',
- },
- {
- score: 0.45,
- state: {},
- title: 'Suggestion4',
- previewIcon: 'empty',
- },
- ],
- },
+ visualizationMap = {
+ testVis: {
+ ...mockVisualization,
+ getSuggestions: () => [
+ {
+ score: 0.1,
+ state: {},
+ title: 'Suggestion6',
+ previewIcon: 'empty',
+ },
+ {
+ score: 0.5,
+ state: {},
+ title: 'Suggestion3',
+ previewIcon: 'empty',
+ },
+ {
+ score: 0.7,
+ state: {},
+ title: 'Suggestion2',
+ previewIcon: 'empty',
+ },
+ {
+ score: 0.8,
+ state: {},
+ title: 'Suggestion1',
+ previewIcon: 'empty',
+ },
+ ],
},
- datasourceMap: {
- testDatasource: {
- ...mockDatasource,
- getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
- },
+ testVis2: {
+ ...mockVisualization,
+ getSuggestions: () => [
+ {
+ score: 0.4,
+ state: {},
+ title: 'Suggestion5',
+ previewIcon: 'empty',
+ },
+ {
+ score: 0.45,
+ state: {},
+ title: 'Suggestion4',
+ previewIcon: 'empty',
+ },
+ ],
},
-
- ExpressionRenderer: expressionRendererMock,
};
-      instance = (await mountWithProvider(<EditorFrame {...props} />)).instance;
+
+ renderEditorFrame();
expect(
- instance
- .find('[data-test-subj="lnsSuggestion"]')
- .find(EuiPanel)
- .map((el) => el.parents(EuiToolTip).prop('content'))
+ within(screen.getByTestId('lnsSuggestionsPanel'))
+ .getAllByTestId('lnsSuggestion')
+ .map((el) => el.textContent)
).toEqual([
'Current visualization',
'Suggestion1',
@@ -691,39 +479,26 @@ describe('editor_frame', () => {
it('should switch to suggested visualization', async () => {
mockDatasource.getLayers.mockReturnValue(['first', 'second', 'third']);
const newDatasourceState = {};
- const suggestionVisState = {};
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: {
- ...mockVisualization,
- getSuggestions: () => [
- {
- score: 0.8,
- state: suggestionVisState,
- title: 'Suggestion1',
- previewIcon: 'empty',
- },
- ],
- },
- testVis2: mockVisualization2,
- },
- datasourceMap: {
- testDatasource: {
- ...mockDatasource,
- getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
- },
+ const suggestionVisState = { suggested: true };
+
+ visualizationMap = {
+ testVis: {
+ ...mockVisualization,
+ getSuggestions: () => [
+ {
+ score: 0.8,
+ state: suggestionVisState,
+ title: 'Suggestion1',
+ previewIcon: 'empty',
+ },
+ ],
},
-
- ExpressionRenderer: expressionRendererMock,
+ testVis2: mockVisualization2,
};
-      instance = (await mountWithProvider(<EditorFrame {...props} />)).instance;
- act(() => {
- instance.find('[data-test-subj="lnsSuggestion"]').at(2).simulate('click');
- });
+ renderEditorFrame();
+ userEvent.click(screen.getByLabelText(/Suggestion1/i));
- expect(mockVisualization.getConfiguration).toHaveBeenCalledTimes(2);
expect(mockVisualization.getConfiguration).toHaveBeenLastCalledWith(
expect.objectContaining({
state: suggestionVisState,
@@ -735,13 +510,18 @@ describe('editor_frame', () => {
})
);
});
+ describe('legacy tests', () => {
+ let instance: ReactWrapper;
+
+ afterEach(() => {
+ instance.unmount();
+ });
+
+      // this test doesn't test anything; it's buggy and should be rewritten once we find a way to exercise drag and drop through user interactions
+ it.skip('should switch to best suggested visualization on field drop', async () => {
+ const suggestionVisState = {};
- it('should switch to best suggested visualization on field drop', async () => {
- mockDatasource.getLayers.mockReturnValue(['first']);
- const suggestionVisState = {};
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
+ visualizationMap = {
testVis: {
...mockVisualization,
getSuggestions: () => [
@@ -760,210 +540,196 @@ describe('editor_frame', () => {
],
},
testVis2: mockVisualization2,
- },
- datasourceMap: {
+ };
+ datasourceMap = {
testDatasource: {
...mockDatasource,
getDatasourceSuggestionsForField: () => [generateSuggestion()],
getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
getDatasourceSuggestionsForVisualizeField: () => [generateSuggestion()],
},
- },
+ };
+ renderEditorFrame();
- ExpressionRenderer: expressionRendererMock,
- };
-      instance = (await mountWithProvider(<EditorFrame {...props} />)).instance;
+ mockVisualization.getConfiguration.mockClear();
+ act(() => {
+ instance.find('[data-test-subj="lnsWorkspace"]').last().simulate('drop');
+ });
- act(() => {
- instance.find('[data-test-subj="lnsWorkspace"]').last().simulate('drop');
+ expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
+ expect.objectContaining({
+ state: {},
+ })
+ );
});
- expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
- expect.objectContaining({
- state: suggestionVisState,
- })
- );
- });
-
- it('should use the currently selected visualization if possible on field drop', async () => {
- mockDatasource.getLayers.mockReturnValue(['first', 'second', 'third']);
- const suggestionVisState = {};
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: {
- ...mockVisualization,
- getSuggestions: () => [
- {
- score: 0.2,
- state: {},
- title: 'Suggestion1',
- previewIcon: 'empty',
- },
- {
- score: 0.6,
- state: suggestionVisState,
- title: 'Suggestion2',
- previewIcon: 'empty',
- },
- ],
- },
- testVis2: {
- ...mockVisualization2,
- getSuggestions: () => [
- {
- score: 0.8,
- state: {},
- title: 'Suggestion3',
- previewIcon: 'empty',
- },
- ],
+ it('should use the currently selected visualization if possible on field drop', async () => {
+ mockDatasource.getLayers.mockReturnValue(['first', 'second', 'third']);
+ const suggestionVisState = {};
+ const props = {
+ ...getDefaultProps(),
+ visualizationMap: {
+ testVis: {
+ ...mockVisualization,
+ getSuggestions: () => [
+ {
+ score: 0.2,
+ state: {},
+ title: 'Suggestion1',
+ previewIcon: 'empty',
+ },
+ {
+ score: 0.6,
+ state: suggestionVisState,
+ title: 'Suggestion2',
+ previewIcon: 'empty',
+ },
+ ],
+ },
+ testVis2: {
+ ...mockVisualization2,
+ getSuggestions: () => [
+ {
+ score: 0.8,
+ state: {},
+ title: 'Suggestion3',
+ previewIcon: 'empty',
+ },
+ ],
+ },
},
- },
- datasourceMap: {
- testDatasource: {
- ...mockDatasource,
- getDatasourceSuggestionsForField: () => [generateSuggestion()],
- getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
- getDatasourceSuggestionsForVisualizeField: () => [generateSuggestion()],
-          DataPanelComponent: jest.fn().mockImplementation(() => <div data-test-subj="mockVisA" />),
+ datasourceMap: {
+ testDatasource: {
+ ...mockDatasource,
+ getDatasourceSuggestionsForField: () => [generateSuggestion()],
+ getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
+ getDatasourceSuggestionsForVisualizeField: () => [generateSuggestion()],
+            DataPanelComponent: jest.fn().mockImplementation(() => <div data-test-subj="mockVisA" />),
+ },
},
- },
-
- ExpressionRenderer: expressionRendererMock,
- } as EditorFrameProps;
- instance = (
-          await mountWithProvider(<EditorFrame {...props} />, {
- preloadedState: {
- datasourceStates: {
- testDatasource: {
- isLoading: false,
- state: {
- internalState1: '',
+ } as EditorFrameProps;
+ instance = (
+        await mountWithProvider(<EditorFrame {...props} />, {
+ preloadedState: {
+ datasourceStates: {
+ testDatasource: {
+ isLoading: false,
+ state: {
+ internalState1: '',
+ },
},
},
},
- },
- })
- ).instance;
-
- instance.update();
+ })
+ ).instance;
+
+ instance.update();
+
+ act(() => {
+ instance.find('[data-test-subj="mockVisA"]').find(DragDrop).prop('onDrop')!(
+ {
+ indexPatternId: '1',
+ field: {},
+ id: '1',
+ humanData: { label: 'draggedField' },
+ },
+ 'field_add'
+ );
+ });
- act(() => {
- instance.find('[data-test-subj="mockVisA"]').find(DragDrop).prop('onDrop')!(
- {
- indexPatternId: '1',
- field: {},
- id: '1',
- humanData: { label: 'draggedField' },
- },
- 'field_add'
+ expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
+ expect.objectContaining({
+ state: suggestionVisState,
+ })
);
});
- expect(mockVisualization.getConfiguration).toHaveBeenCalledWith(
- expect.objectContaining({
- state: suggestionVisState,
- })
- );
- });
-
- it('should use the highest priority suggestion available', async () => {
- mockDatasource.getLayers.mockReturnValue(['first', 'second', 'third']);
- const suggestionVisState = {};
- const mockVisualization3 = {
- ...createMockVisualization(),
- id: 'testVis3',
- getLayerIds: () => ['third'],
- visualizationTypes: [
- {
- icon: 'empty',
- id: 'testVis3',
- label: 'TEST3',
- groupLabel: 'testVis3Group',
- },
- ],
- getSuggestions: () => [
- {
- score: 0.9,
- state: suggestionVisState,
- title: 'Suggestion3',
- previewIcon: 'empty',
- },
- {
- score: 0.7,
- state: {},
- title: 'Suggestion4',
- previewIcon: 'empty',
- },
- ],
- };
-
- const props = {
- ...getDefaultProps(),
- visualizationMap: {
- testVis: {
- ...mockVisualization,
- // do not return suggestions for the currently active vis, otherwise it will be chosen
- getSuggestions: () => [],
- },
- testVis2: {
- ...mockVisualization2,
- getSuggestions: () => [],
- },
- testVis3: {
- ...mockVisualization3,
+ it('should use the highest priority suggestion available', async () => {
+ mockDatasource.getLayers.mockReturnValue(['first', 'second', 'third']);
+ const suggestionVisState = {};
+ const mockVisualization3 = {
+ ...createMockVisualization('testVis3', ['third']),
+ getSuggestions: () => [
+ {
+ score: 0.9,
+ state: suggestionVisState,
+ title: 'Suggestion3',
+ previewIcon: 'empty',
+ },
+ {
+ score: 0.7,
+ state: {},
+ title: 'Suggestion4',
+ previewIcon: 'empty',
+ },
+ ],
+ };
+
+ const props = {
+ ...getDefaultProps(),
+ visualizationMap: {
+ testVis: {
+ ...mockVisualization,
+ // do not return suggestions for the currently active vis, otherwise it will be chosen
+ getSuggestions: () => [],
+ },
+ testVis2: {
+ ...mockVisualization2,
+ getSuggestions: () => [],
+ },
+ testVis3: {
+ ...mockVisualization3,
+ },
},
- },
- datasourceMap: {
- testDatasource: {
- ...mockDatasource,
- getDatasourceSuggestionsForField: () => [generateSuggestion()],
- getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
- getDatasourceSuggestionsForVisualizeField: () => [generateSuggestion()],
- DataPanelComponent: jest.fn().mockImplementation(() => {
- const [, dndDispatch] = useDragDropContext();
- useEffect(() => {
- dndDispatch({
- type: 'startDragging',
- payload: {
- dragging: {
- id: 'draggedField',
- humanData: { label: '1' },
+ datasourceMap: {
+ testDatasource: {
+ ...mockDatasource,
+ getDatasourceSuggestionsForField: () => [generateSuggestion()],
+ getDatasourceSuggestionsFromCurrentState: () => [generateSuggestion()],
+ getDatasourceSuggestionsForVisualizeField: () => [generateSuggestion()],
+ DataPanelComponent: jest.fn().mockImplementation(() => {
+ const [, dndDispatch] = useDragDropContext();
+ useEffect(() => {
+ dndDispatch({
+ type: 'startDragging',
+ payload: {
+ dragging: {
+ id: 'draggedField',
+ humanData: { label: '1' },
+ },
},
- },
- });
- }, [dndDispatch]);
-              return <div />;
- }),
+ });
+ }, [dndDispatch]);
+            return <div />;
+ }),
+ },
},
- },
- ExpressionRenderer: expressionRendererMock,
- } as EditorFrameProps;
+ } as EditorFrameProps;
-      instance = (await mountWithProvider(<EditorFrame {...props} />)).instance;
+      instance = (await mountWithProvider(<EditorFrame {...props} />)).instance;
- instance.update();
+ instance.update();
- act(() => {
- instance.find(DragDrop).filter('[dataTestSubj="lnsWorkspace"]').prop('onDrop')!(
- {
- indexPatternId: '1',
- field: {},
- id: '1',
- humanData: {
- label: 'label',
+ act(() => {
+ instance.find(DragDrop).filter('[dataTestSubj="lnsWorkspace"]').prop('onDrop')!(
+ {
+ indexPatternId: '1',
+ field: {},
+ id: '1',
+ humanData: {
+ label: 'label',
+ },
},
- },
- 'field_add'
+ 'field_add'
+ );
+ });
+
+ expect(mockVisualization3.getConfiguration).toHaveBeenCalledWith(
+ expect.objectContaining({
+ state: suggestionVisState,
+ })
);
});
-
- expect(mockVisualization3.getConfiguration).toHaveBeenCalledWith(
- expect.objectContaining({
- state: suggestionVisState,
- })
- );
});
});
});
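(For orientation, the helpers returned by `renderEditorFrame` above are meant to be composed per test; a typical flow — all names come from this file, the dispatched visualization state value is purely illustrative — looks like:)

const { store, openChartSwitch, switchToVis, queryWorkspacePanel } = renderEditorFrame();

// drive the UI through the chart switcher...
openChartSwitch();
switchToVis('testVis2');
expect(mockVisualization2.initialize).toHaveBeenCalled();

// ...or drive the Redux store directly and assert on the resulting render
store.dispatch(setState({ visualization: { activeId: 'testVis2', state: 'illustrative state' } }));
expect(queryWorkspacePanel()).toBeInTheDocument();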
diff --git a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.test.tsx b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.test.tsx
index 3e613d5a23e89..a82da848dc4a2 100644
--- a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.test.tsx
+++ b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.test.tsx
@@ -6,62 +6,54 @@
*/
import React from 'react';
-import { ReactWrapper } from 'enzyme';
-import type { PaletteOutput } from '@kbn/coloring';
+import { screen, fireEvent, within } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
import {
createMockVisualization,
mockStoreDeps,
createMockFramePublicAPI,
mockDatasourceMap,
mockDatasourceStates,
+ renderWithReduxStore,
} from '../../../mocks';
-import { mountWithProvider } from '../../../mocks';
-
-// Tests are executed in a jsdom environment who does not have sizing methods,
-// thus the AutoSizer will always compute a 0x0 size space
-// Mock the AutoSizer inside EuiSelectable (Chart Switch) and return some dimensions > 0
-jest.mock('react-virtualized-auto-sizer', () => {
- return function (props: {
- children: (dimensions: { width: number; height: number }) => React.ReactNode;
- }) {
- const { children } = props;
-    return <div>{children({ width: 100, height: 100 })}</div>;
- };
-});
-import { Visualization, FramePublicAPI, DatasourcePublicAPI } from '../../../types';
-import { ChartSwitch } from './chart_switch';
-import { applyChanges } from '../../../state_management';
+import {
+ Visualization,
+ FramePublicAPI,
+ DatasourcePublicAPI,
+ SuggestionRequest,
+} from '../../../types';
+import { ChartSwitch, ChartSwitchProps } from './chart_switch';
+import { LensAppState, applyChanges } from '../../../state_management';
describe('chart_switch', () => {
  function generateVisualization(id: string): jest.Mocked<Visualization> {
return {
- ...createMockVisualization(),
- id,
- getVisualizationTypeId: jest.fn((_state) => id),
- visualizationTypes: [
+ ...createMockVisualization(id),
+ getSuggestions: jest.fn((options) => [
{
- icon: 'empty',
- id,
- label: `Label ${id}`,
- groupLabel: `${id}Group`,
+ score: 1,
+ title: '',
+ state: `suggestion ${id}`,
+ previewIcon: 'empty',
},
- ],
- initialize: jest.fn((_addNewLayer, state) => {
- return state || `${id} initial state`;
- }),
- getSuggestions: jest.fn((options) => {
- return [
- {
- score: 1,
- title: '',
- state: `suggestion ${id}`,
- previewIcon: 'empty',
- },
- ];
- }),
+ ]),
};
}
+ let visualizationMap = mockVisualizationMap();
+ let datasourceMap = mockDatasourceMap();
+ let datasourceStates = mockDatasourceStates();
+ let frame = mockFrame(['a']);
+
+ beforeEach(() => {
+ visualizationMap = mockVisualizationMap();
+ datasourceMap = mockDatasourceMap();
+ datasourceStates = mockDatasourceStates();
+ frame = mockFrame(['a']);
+ });
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
/**
* There are three visualizations. Each one has the same suggestion behavior:
@@ -145,44 +137,66 @@ describe('chart_switch', () => {
} as FramePublicAPI;
}
- function showFlyout(instance: ReactWrapper) {
- instance.find('button[data-test-subj="lnsChartSwitchPopover"]').first().simulate('click');
- }
-
- function switchTo(subType: string, instance: ReactWrapper) {
- showFlyout(instance);
- instance.find(`[data-test-subj="lnsChartSwitchPopover_${subType}"]`).first().simulate('click');
- }
-
- function getMenuItem(subType: string, instance: ReactWrapper) {
- showFlyout(instance);
- return instance.find(`[data-test-subj="lnsChartSwitchPopover_${subType}"]`).first();
- }
- it('should use suggested state if there is a suggestion from the target visualization', async () => {
- const visualizationMap = mockVisualizationMap();
- const { instance, lensStore } = await mountWithProvider(
+ const renderChartSwitch = (
+    propsOverrides: Partial<ChartSwitchProps> = {},
+    { preloadedStateOverrides }: { preloadedStateOverrides: Partial<LensAppState> } = {
+ preloadedStateOverrides: {},
+ }
+ ) => {
+ const { store, ...rtlRender } = renderWithReduxStore(
      <ChartSwitch
        framePublicAPI={frame}
        visualizationMap={visualizationMap}
        datasourceMap={datasourceMap}
        {...propsOverrides}
      />,
+ {},
{
+ storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
preloadedState: {
visualization: {
activeId: 'visA',
state: 'state from a',
},
+ datasourceStates,
+ activeDatasourceId: 'testDatasource',
+ ...preloadedStateOverrides,
},
}
);
- switchTo('visB', instance);
+ const openChartSwitch = () => {
+ userEvent.click(screen.getByTestId('lnsChartSwitchPopover'));
+ };
+
+ const getMenuItem = (subType: string) => {
+ const list = screen.getByTestId('lnsChartSwitchList');
+ return within(list).getByTestId(`lnsChartSwitchPopover_${subType}`);
+ };
+
+ const switchToVis = (subType: string) => {
+ fireEvent.click(getMenuItem(subType));
+ };
+
+ return {
+ ...rtlRender,
+ store,
+ switchToVis,
+ getMenuItem,
+ openChartSwitch,
+ };
+ };
+
+ it('should use suggested state if there is a suggestion from the target visualization', async () => {
+ const { store, openChartSwitch, switchToVis } = renderChartSwitch();
+ openChartSwitch();
+ switchToVis('visB');
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/switchVisualization',
payload: {
suggestion: {
- visualizationState: 'suggestion visB',
+ visualizationState: 'visB initial state',
newVisualizationId: 'visB',
datasourceId: 'testDatasource',
datasourceState: {},
@@ -190,38 +204,18 @@ describe('chart_switch', () => {
clearStagedPreview: true,
},
});
- expect(lensStore.dispatch).not.toHaveBeenCalledWith({ type: applyChanges.type }); // should not apply changes automatically
+ expect(store.dispatch).not.toHaveBeenCalledWith({ type: applyChanges.type }); // should not apply changes automatically
});
it('should use initial state if there is no suggestion from the target visualization', async () => {
- const visualizationMap = mockVisualizationMap();
visualizationMap.visB.getSuggestions.mockReturnValueOnce([]);
- const frame = mockFrame(['a']);
(frame.datasourceLayers.a?.getTableSpec as jest.Mock).mockReturnValue([]);
- const datasourceMap = mockDatasourceMap();
- const datasourceStates = mockDatasourceStates();
- const { instance, lensStore } = await mountWithProvider(
- ,
- {
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- preloadedState: {
- datasourceStates,
- activeDatasourceId: 'testDatasource',
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
+ const { store, switchToVis, openChartSwitch } = renderChartSwitch();
+ openChartSwitch();
+ switchToVis('visB');
- switchTo('visB', instance);
expect(datasourceMap.testDatasource.removeLayer).toHaveBeenCalledWith({}, 'a'); // from preloaded state
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/switchVisualization',
payload: {
suggestion: {
@@ -231,16 +225,13 @@ describe('chart_switch', () => {
clearStagedPreview: true,
},
});
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/removeLayers',
payload: { layerIds: ['a'], visualizationId: 'visA' },
});
});
it('should indicate data loss if not all columns will be used', async () => {
- const visualizationMap = mockVisualizationMap();
- const frame = mockFrame(['a']);
- const datasourceMap = mockDatasourceMap();
datasourceMap.testDatasource.getDatasourceSuggestionsFromCurrentState.mockReturnValue([
{
state: {},
@@ -276,61 +267,21 @@ describe('chart_switch', () => {
{ columnId: 'col3', fields: [] },
]);
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
+ const { openChartSwitch, getMenuItem } = renderChartSwitch();
+ openChartSwitch();
- expect(
- getMenuItem('visB', instance)
- .find('[data-test-subj="lnsChartSwitchPopoverAlert_visB"]')
- .first()
- .props().type
- ).toEqual('warning');
+ expect(within(getMenuItem('visB')).getByText(/warning/i)).toBeInTheDocument();
});
it('should indicate data loss if not all layers will be used', async () => {
- const visualizationMap = mockVisualizationMap();
- const frame = mockFrame(['a', 'b']);
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
-
- expect(
- getMenuItem('visB', instance)
- .find('[data-test-subj="lnsChartSwitchPopoverAlert_visB"]')
- .first()
- .props().type
- ).toEqual('warning');
+ frame = mockFrame(['a', 'b']);
+ const { openChartSwitch, getMenuItem } = renderChartSwitch();
+ openChartSwitch();
+ expect(within(getMenuItem('visB')).getByText(/warning/i)).toBeInTheDocument();
});
it('should support multi-layer suggestions without data loss', async () => {
- const visualizationMap = mockVisualizationMap();
- const frame = mockFrame(['a', 'b']);
- const datasourceMap = mockDatasourceMap();
+ frame = mockFrame(['a', 'b']);
datasourceMap.testDatasource.getDatasourceSuggestionsFromCurrentState.mockReturnValue([
{
state: {},
@@ -352,147 +303,53 @@ describe('chart_switch', () => {
keptLayerIds: ['a', 'b'],
},
]);
-
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
-
- expect(
- getMenuItem('visB', instance).find('[data-test-subj="lnsChartSwitchPopoverAlert_visB"]')
- ).toHaveLength(0);
+ const { openChartSwitch, getMenuItem } = renderChartSwitch();
+ openChartSwitch();
+ expect(within(getMenuItem('visB')).queryByText(/warning/i)).not.toBeInTheDocument();
});
it('should indicate data loss if no data will be used', async () => {
- const visualizationMap = mockVisualizationMap();
visualizationMap.visB.getSuggestions.mockReturnValueOnce([]);
- const frame = mockFrame(['a']);
-
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
-
- expect(
- getMenuItem('visB', instance)
- .find('[data-test-subj="lnsChartSwitchPopoverAlert_visB"]')
- .first()
- .props().type
- ).toEqual('warning');
+ const { openChartSwitch, getMenuItem } = renderChartSwitch();
+ openChartSwitch();
+ expect(within(getMenuItem('visB')).queryByText(/warning/i)).toBeInTheDocument();
});
it('should not indicate data loss if there is no data', async () => {
- const visualizationMap = mockVisualizationMap();
visualizationMap.visB.getSuggestions.mockReturnValueOnce([]);
- const frame = mockFrame(['a']);
+ frame = mockFrame(['a']);
(frame.datasourceLayers.a?.getTableSpec as jest.Mock).mockReturnValue([]);
-
- const { instance } = await mountWithProvider(
- ,
-
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
-
- expect(
- getMenuItem('visB', instance).find('[data-test-subj="lnsChartSwitchPopoverAlert_visB"]')
- ).toHaveLength(0);
+ const { openChartSwitch, getMenuItem } = renderChartSwitch();
+ openChartSwitch();
+ expect(within(getMenuItem('visB')).queryByText(/warning/i)).not.toBeInTheDocument();
});
it('should not show a warning when the subvisualization is the same', async () => {
- const frame = mockFrame(['a', 'b', 'c']);
- const visualizationMap = mockVisualizationMap();
- visualizationMap.visC.getVisualizationTypeId.mockReturnValue('subvisC2');
- const switchVisualizationType = jest.fn(() => ({ type: 'subvisC1' }));
+ frame = mockFrame(['a', 'b', 'c']);
- visualizationMap.visC.switchVisualizationType = switchVisualizationType;
-
- const datasourceStates = mockDatasourceStates();
-
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- datasourceStates,
- activeDatasourceId: 'testDatasource',
- visualization: {
- activeId: 'visC',
- state: { type: 'subvisC2' },
- },
+ visualizationMap.visC.getVisualizationTypeId.mockReturnValue('subvisC2');
+ visualizationMap.visC.switchVisualizationType = jest.fn(() => ({ type: 'subvisC1' }));
+ const { openChartSwitch, getMenuItem } = renderChartSwitch(undefined, {
+ preloadedStateOverrides: {
+ visualization: {
+ activeId: 'visC',
+ state: { type: 'subvisC2' },
},
- }
- );
-
- expect(
- getMenuItem('subvisC2', instance).find(
- '[data-test-subj="lnsChartSwitchPopoverAlert_subvisC2"]'
- )
- ).toHaveLength(0);
+ },
+ });
+ openChartSwitch();
+ expect(within(getMenuItem('subvisC2')).queryByText(/warning/i)).not.toBeInTheDocument();
});
it('should get suggestions when switching subvisualization', async () => {
- const visualizationMap = mockVisualizationMap();
visualizationMap.visB.getSuggestions.mockReturnValueOnce([]);
- const frame = mockFrame(['a', 'b', 'c']);
- const datasourceMap = mockDatasourceMap();
+ frame = mockFrame(['a', 'b', 'c']);
datasourceMap.testDatasource.getLayers.mockReturnValue(['a', 'b', 'c']);
- const datasourceStates = mockDatasourceStates();
- const { instance, lensStore } = await mountWithProvider(
- ,
- {
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- preloadedState: {
- datasourceStates,
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
+ const { openChartSwitch, switchToVis, store } = renderChartSwitch();
+ openChartSwitch();
+ switchToVis('visB');
- switchTo('visB', instance);
expect(datasourceMap.testDatasource.removeLayer).toHaveBeenCalledWith({}, 'a');
expect(datasourceMap.testDatasource.removeLayer).toHaveBeenCalledWith({}, 'b');
expect(datasourceMap.testDatasource.removeLayer).toHaveBeenCalledWith({}, 'c');
@@ -502,7 +359,7 @@ describe('chart_switch', () => {
})
);
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/switchVisualization',
payload: {
suggestion: {
@@ -517,75 +374,49 @@ describe('chart_switch', () => {
});
it('should query main palette from active chart and pass into suggestions', async () => {
- const visualizationMap = mockVisualizationMap();
- const mockPalette: PaletteOutput = { type: 'palette', name: 'mock' };
- visualizationMap.visA.getMainPalette = jest.fn(() => ({
+ const legacyPalette: SuggestionRequest['mainPalette'] = {
type: 'legacyPalette',
- value: mockPalette,
- }));
+ value: { type: 'palette', name: 'mock' },
+ };
+ visualizationMap.visA.getMainPalette = jest.fn(() => legacyPalette);
visualizationMap.visB.getSuggestions.mockReturnValueOnce([]);
- const frame = mockFrame(['a', 'b', 'c']);
- const currentVisState = {};
- const datasourceMap = mockDatasourceMap();
+ frame = mockFrame(['a', 'b', 'c']);
datasourceMap.testDatasource.getLayers.mockReturnValue(['a', 'b', 'c']);
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: currentVisState,
- },
- },
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- }
- );
-
- switchTo('visB', instance);
+ const { openChartSwitch, switchToVis } = renderChartSwitch();
+ openChartSwitch();
+ switchToVis('visB');
- expect(visualizationMap.visA.getMainPalette).toHaveBeenCalledWith(currentVisState);
+ expect(visualizationMap.visA.getMainPalette).toHaveBeenCalledWith('state from a');
expect(visualizationMap.visB.getSuggestions).toHaveBeenCalledWith(
expect.objectContaining({
keptLayerIds: ['a'],
- mainPalette: { type: 'legacyPalette', value: mockPalette },
+ mainPalette: legacyPalette,
})
);
});
it('should not remove layers when switching between subtypes', async () => {
- const frame = mockFrame(['a', 'b', 'c']);
- const visualizationMap = mockVisualizationMap();
- const switchVisualizationType = jest.fn(() => 'switched');
+ frame = mockFrame(['a', 'b', 'c']);
+ visualizationMap.visC.switchVisualizationType = jest.fn(() => 'switched');
- visualizationMap.visC.switchVisualizationType = switchVisualizationType;
- const datasourceMap = mockDatasourceMap();
- const { instance, lensStore } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visC',
- state: { type: 'subvisC1' },
- },
+ const { openChartSwitch, switchToVis, store } = renderChartSwitch(undefined, {
+ preloadedStateOverrides: {
+ visualization: {
+ activeId: 'visC',
+ state: { type: 'subvisC1' },
},
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- }
- );
+ },
+ });
- switchTo('subvisC3', instance);
- expect(switchVisualizationType).toHaveBeenCalledWith('subvisC3', { type: 'subvisC3' });
+ openChartSwitch();
+ switchToVis('subvisC3');
+ expect(visualizationMap.visC.switchVisualizationType).toHaveBeenCalledWith('subvisC3', {
+ type: 'subvisC3',
+ });
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/switchVisualization',
payload: {
suggestion: {
@@ -601,31 +432,22 @@ describe('chart_switch', () => {
});
it('should not remove layers and initialize with existing state when switching between subtypes without data', async () => {
- const frame = mockFrame(['a']);
    const datasourceLayers = frame.datasourceLayers as Record<string, jest.Mocked<DatasourcePublicAPI>>;
datasourceLayers.a.getTableSpec = jest.fn().mockReturnValue([]);
- const visualizationMap = mockVisualizationMap();
+
visualizationMap.visC.getSuggestions = jest.fn().mockReturnValue([]);
visualizationMap.visC.switchVisualizationType = jest.fn(() => 'switched');
- const datasourceMap = mockDatasourceMap();
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visC',
- state: { type: 'subvisC1' },
- },
- },
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- }
- );
- switchTo('subvisC3', instance);
+ const { openChartSwitch, switchToVis } = renderChartSwitch(undefined, {
+ preloadedStateOverrides: {
+ visualization: {
+ activeId: 'visC',
+ state: { type: 'subvisC1' },
+ },
+ },
+ });
+ openChartSwitch();
+ switchToVis('subvisC3');
expect(visualizationMap.visC.switchVisualizationType).toHaveBeenCalledWith('subvisC3', {
type: 'subvisC1',
@@ -634,9 +456,8 @@ describe('chart_switch', () => {
});
it('should switch to the updated datasource state', async () => {
- const visualizationMap = mockVisualizationMap();
- const frame = mockFrame(['a', 'b']);
- const datasourceMap = mockDatasourceMap();
+ frame = mockFrame(['a', 'b']);
+
datasourceMap.testDatasource.getDatasourceSuggestionsFromCurrentState.mockReturnValue([
{
state: 'testDatasource suggestion',
@@ -666,33 +487,19 @@ describe('chart_switch', () => {
keptLayerIds: [],
},
]);
- const { instance, lensStore } = await mountWithProvider(
- ,
- {
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
- switchTo('visB', instance);
+ const { openChartSwitch, switchToVis, store } = renderChartSwitch();
+ openChartSwitch();
+ switchToVis('visB');
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/switchVisualization',
payload: {
suggestion: {
newVisualizationId: 'visB',
datasourceId: 'testDatasource',
datasourceState: 'testDatasource suggestion',
- visualizationState: 'suggestion visB',
+ visualizationState: 'visB initial state',
},
clearStagedPreview: true,
},
@@ -700,37 +507,18 @@ describe('chart_switch', () => {
});
it('should ensure the new visualization has the proper subtype', async () => {
- const visualizationMap = mockVisualizationMap();
- const switchVisualizationType = jest.fn(
+ visualizationMap.visB.switchVisualizationType = jest.fn(
(visualizationType, state) => `${state} ${visualizationType}`
);
+ const { openChartSwitch, switchToVis, store } = renderChartSwitch();
+ openChartSwitch();
+ switchToVis('visB');
- visualizationMap.visB.switchVisualizationType = switchVisualizationType;
- const datasourceMap = mockDatasourceMap();
- const { instance, lensStore } = await mountWithProvider(
- ,
- {
- storeDeps: mockStoreDeps({ datasourceMap, visualizationMap }),
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
-
- switchTo('visB', instance);
-
- expect(lensStore.dispatch).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith({
type: 'lens/switchVisualization',
payload: {
suggestion: {
- visualizationState: 'suggestion visB visB',
+ visualizationState: 'visB initial state visB',
newVisualizationId: 'visB',
datasourceId: 'testDatasource',
datasourceState: {},
@@ -741,56 +529,27 @@ describe('chart_switch', () => {
});
it('should use the suggestion that matches the subtype', async () => {
- const visualizationMap = mockVisualizationMap();
- const switchVisualizationType = jest.fn();
-
- visualizationMap.visC.switchVisualizationType = switchVisualizationType;
- const datasourceMap = mockDatasourceMap();
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visC',
- state: { type: 'subvisC3' },
- },
+ const { openChartSwitch, switchToVis } = renderChartSwitch(undefined, {
+ preloadedStateOverrides: {
+ visualization: {
+ activeId: 'visC',
+ state: { type: 'subvisC3' },
},
- }
- );
-
- switchTo('subvisC1', instance);
- expect(switchVisualizationType).toHaveBeenCalledWith('subvisC1', {
+ },
+ });
+ openChartSwitch();
+ switchToVis('subvisC1');
+ expect(visualizationMap.visC.switchVisualizationType).toHaveBeenCalledWith('subvisC1', {
type: 'subvisC1',
notPrimary: true,
});
});
it('should show all visualization types', async () => {
- const datasourceMap = mockDatasourceMap();
- const { instance } = await mountWithProvider(
- ,
- {
- preloadedState: {
- visualization: {
- activeId: 'visA',
- state: {},
- },
- },
- }
- );
-
- showFlyout(instance);
-
- const allDisplayed = ['visA', 'visB', 'subvisC1', 'subvisC2', 'subvisC3'].every(
- (subType) => instance.find(`[data-test-subj="lnsChartSwitchPopover_${subType}"]`).length > 0
+ const { openChartSwitch, getMenuItem } = renderChartSwitch();
+ openChartSwitch();
+ const allDisplayed = ['visA', 'visB', 'subvisC1', 'subvisC2', 'subvisC3'].every((subType) =>
+ getMenuItem(subType)
);
expect(allDisplayed).toBeTruthy();
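(Note on the data-loss checks above: they match on the warning icon's accessible text. A slightly stricter alternative is to target the alert's test subject, which the removed enzyme assertions used — a sketch only, assuming `lnsChartSwitchPopoverAlert_*` is still rendered inside the popover:)

const { openChartSwitch, getMenuItem } = renderChartSwitch();
openChartSwitch();
expect(
  within(getMenuItem('visB')).queryByTestId('lnsChartSwitchPopoverAlert_visB')
).toBeInTheDocument();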
diff --git a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.tsx b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.tsx
index c40c81285dc15..2cb8fc75a6758 100644
--- a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.tsx
+++ b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/workspace_panel/chart_switch.tsx
@@ -55,7 +55,7 @@ interface VisualizationSelection {
sameDatasources?: boolean;
}
-interface Props {
+export interface ChartSwitchProps {
framePublicAPI: FramePublicAPI;
visualizationMap: VisualizationMap;
datasourceMap: DatasourceMap;
@@ -126,7 +126,7 @@ function getCurrentVisualizationId(
);
}
-export const ChartSwitch = memo(function ChartSwitch(props: Props) {
+export const ChartSwitch = memo(function ChartSwitch(props: ChartSwitchProps) {
const [flyoutOpen, setFlyoutOpen] = useState(false);
const dispatchLens = useLensDispatch();
const activeDatasourceId = useLensSelector(selectActiveDatasourceId);
@@ -494,7 +494,7 @@ export const ChartSwitch = memo(function ChartSwitch(props: Props) {
});
function getTopSuggestion(
- props: Props,
+ props: ChartSwitchProps,
visualizationId: string,
datasourceStates: DatasourceStates,
visualization: VisualizationState,
diff --git a/x-pack/plugins/lens/public/mocks/datasource_mock.tsx b/x-pack/plugins/lens/public/mocks/datasource_mock.tsx
index 207c6562be557..daf56218bcd07 100644
--- a/x-pack/plugins/lens/public/mocks/datasource_mock.tsx
+++ b/x-pack/plugins/lens/public/mocks/datasource_mock.tsx
@@ -51,7 +51,7 @@ export function createMockDatasource(
removeLayer: jest.fn((state, layerId) => ({ newState: state, removedLayerIds: [layerId] })),
cloneLayer: jest.fn((_state, _layerId, _newLayerId, getNewId) => {}),
removeColumn: jest.fn((props) => {}),
- getLayers: jest.fn((_state) => []),
+ getLayers: jest.fn((_state) => ['a']),
uniqueLabels: jest.fn((_state, dataViews) => ({})),
getDropProps: jest.fn(),
onDrop: jest.fn(),
diff --git a/x-pack/plugins/lens/public/mocks/expression_renderer_mock.tsx b/x-pack/plugins/lens/public/mocks/expression_renderer_mock.tsx
index 42187662e0213..842d6ccbb713f 100644
--- a/x-pack/plugins/lens/public/mocks/expression_renderer_mock.tsx
+++ b/x-pack/plugins/lens/public/mocks/expression_renderer_mock.tsx
@@ -12,5 +12,7 @@ export function createExpressionRendererMock(): jest.Mock<
React.ReactElement,
[ReactExpressionRendererProps]
> {
- return jest.fn(({ expression }) => {expression || 'Expression renderer mock'});
+ return jest.fn(({ expression }) => (
+ {expression || 'Expression renderer mock'}
+ ));
}
diff --git a/x-pack/plugins/lens/public/mocks/store_mocks.tsx b/x-pack/plugins/lens/public/mocks/store_mocks.tsx
index 0d78a07d3dfce..ad028bfe107f0 100644
--- a/x-pack/plugins/lens/public/mocks/store_mocks.tsx
+++ b/x-pack/plugins/lens/public/mocks/store_mocks.tsx
@@ -69,7 +69,7 @@ export const renderWithReduxStore = (
{
preloadedState,
storeDeps,
- }: { preloadedState: Partial; storeDeps?: LensStoreDeps } = {
+ }: { preloadedState?: Partial; storeDeps?: LensStoreDeps } = {
preloadedState: {},
storeDeps: mockStoreDeps(),
}
diff --git a/x-pack/plugins/lens/public/mocks/visualization_mock.tsx b/x-pack/plugins/lens/public/mocks/visualization_mock.tsx
index b368f994756eb..663345c824095 100644
--- a/x-pack/plugins/lens/public/mocks/visualization_mock.tsx
+++ b/x-pack/plugins/lens/public/mocks/visualization_mock.tsx
@@ -6,39 +6,42 @@
*/
import React from 'react';
import { LayerTypes } from '@kbn/expression-xy-plugin/public';
+import { toExpression } from '@kbn/interpreter';
+import faker from 'faker';
import { Visualization, VisualizationMap } from '../types';
-export function createMockVisualization(id = 'testVis'): jest.Mocked {
- const layerId = 'layer1';
-
+export function createMockVisualization(
+ id = 'testVis',
+ layerIds = ['layer1']
+): jest.Mocked {
return {
id,
clearLayer: jest.fn((state, _layerId, _indexPatternId) => state),
removeLayer: jest.fn(),
- getLayerIds: jest.fn((_state) => [layerId]),
+ getLayerIds: jest.fn((_state) => layerIds),
getSupportedLayers: jest.fn(() => [{ type: LayerTypes.DATA, label: 'Data Layer' }]),
getLayerType: jest.fn((_state, _layerId) => LayerTypes.DATA),
visualizationTypes: [
{
icon: 'empty',
id,
- label: 'TEST',
+ label: faker.lorem.word(),
groupLabel: `${id}Group`,
},
],
appendLayer: jest.fn(),
- getVisualizationTypeId: jest.fn((_state) => 'empty'),
+ getVisualizationTypeId: jest.fn((_state) => id),
getDescription: jest.fn((_state) => ({ label: '' })),
switchVisualizationType: jest.fn((_, x) => x),
getSuggestions: jest.fn((_options) => []),
getRenderEventCounters: jest.fn((_state) => []),
- initialize: jest.fn((_addNewLayer, _state) => ({ newState: 'newState' })),
+ initialize: jest.fn((_addNewLayer, _state) => `${id} initial state`),
getConfiguration: jest.fn((props) => ({
groups: [
{
groupId: 'a',
groupLabel: 'a',
- layerId,
+ layerId: layerIds[0],
supportsMoreColumns: true,
accessors: [],
filterOperations: jest.fn(() => true),
@@ -46,7 +49,15 @@ export function createMockVisualization(id = 'testVis'): jest.Mocked 'expression'),
+ toExpression: jest.fn((state, datasourceLayers, attrs, datasourceExpressionsByLayers = {}) =>
+ toExpression({
+ type: 'expression',
+ chain: [
+ ...(datasourceExpressionsByLayers.first?.chain ?? []),
+ { type: 'function', function: 'testVis', arguments: {} },
+ ],
+ })
+ ),
toPreviewExpression: jest.fn((_state, _frame) => 'expression'),
getUserMessages: jest.fn((_state) => []),
setDimension: jest.fn(),
diff --git a/x-pack/plugins/lens/public/state_management/__snapshots__/load_initial.test.tsx.snap b/x-pack/plugins/lens/public/state_management/__snapshots__/load_initial.test.tsx.snap
index 4c09cefdd6de9..68d4e7d57e983 100644
--- a/x-pack/plugins/lens/public/state_management/__snapshots__/load_initial.test.tsx.snap
+++ b/x-pack/plugins/lens/public/state_management/__snapshots__/load_initial.test.tsx.snap
@@ -100,9 +100,7 @@ Object {
},
"visualization": Object {
"activeId": "testVis",
- "state": Object {
- "newState": "newState",
- },
+ "state": "testVis initial state",
},
},
}
diff --git a/x-pack/plugins/lens/public/state_management/load_initial.test.tsx b/x-pack/plugins/lens/public/state_management/load_initial.test.tsx
index ef9d84062a720..130e1294b9c95 100644
--- a/x-pack/plugins/lens/public/state_management/load_initial.test.tsx
+++ b/x-pack/plugins/lens/public/state_management/load_initial.test.tsx
@@ -199,7 +199,7 @@ describe('Initializing the store', () => {
expect(store.getState()).toEqual({
lens: expect.objectContaining({
visualization: {
- state: { newState: 'newState' }, // new vis gets initialized
+ state: 'testVis initial state', // new vis gets initialized
activeId: 'testVis',
},
activeDatasourceId: 'testDatasource2', // resets to first on the list
From 7f5486e1e6f4841d146a624ced3d5f2466ca6237 Mon Sep 17 00:00:00 2001
From: Ying Mao
Date: Thu, 15 Feb 2024 13:21:37 -0500
Subject: [PATCH 17/23] [Response Ops][Task Manager ] Adding ability for ad-hoc
task instance to specify timeout override (#175731)
Resolves https://github.com/elastic/kibana/issues/174353
## Summary
Adds the ability for a task instance to specify a timeout override that
will be used in place of the task type timeout when running an ad-hoc
task. In the future we may consider allowing timeout overrides for
recurring tasks, but this PR limits usage to ad-hoc task runs only.
This timeout override is planned for use by backfill rule execution
tasks, so the only usages in this PR are in the functional tests.
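For reference, a minimal usage sketch (not part of this patch): scheduling an ad-hoc task with the override. The task type is the sample type added in the functional tests, and `taskManager` stands in for the Task Manager start contract.
```ts
// Hypothetical sketch: `taskManager` is assumed to be the Task Manager plugin's
// start contract, and the task type is the sample type from the functional tests.
await taskManager.schedule({
  taskType: 'sampleAdHocTaskTimingOut',
  params: {},
  state: {},
  // no `schedule`, so this is an ad-hoc run; the 1s timeout declared on the
  // task type is replaced by this value for this run only
  timeoutOverride: '30s',
});
```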
---
x-pack/plugins/task_manager/server/task.ts | 5 +
.../server/task_running/task_runner.test.ts | 85 ++++++++++++-
.../server/task_running/task_runner.ts | 25 ++--
.../server/task_validator.test.ts | 113 ++++++++++++++++++
.../task_manager/server/task_validator.ts | 51 ++++++--
.../sample_task_plugin/server/init_routes.ts | 1 +
.../sample_task_plugin/server/plugin.ts | 31 +++++
.../check_registered_task_types.ts | 1 +
.../task_manager/task_management.ts | 37 ++++++
9 files changed, 333 insertions(+), 16 deletions(-)
diff --git a/x-pack/plugins/task_manager/server/task.ts b/x-pack/plugins/task_manager/server/task.ts
index b7b86c50c8b08..728d175e8b98f 100644
--- a/x-pack/plugins/task_manager/server/task.ts
+++ b/x-pack/plugins/task_manager/server/task.ts
@@ -345,6 +345,11 @@ export interface TaskInstance {
* Indicates the number of skipped executions.
*/
numSkippedRuns?: number;
+
+ /*
+ * Optionally override the timeout defined in the task type for this specific task instance
+ */
+ timeoutOverride?: string;
}
/**
diff --git a/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts b/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts
index 6735b3c0602b8..1e22e15a7f1e4 100644
--- a/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts
+++ b/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts
@@ -250,7 +250,7 @@ describe('TaskManagerRunner', () => {
expect(instance.enabled).not.toBeDefined();
});
- test('calculates retryAt by timeout if it exceeds the schedule when running a recurring task', async () => {
+ test('calculates retryAt by task type timeout if it exceeds the schedule when running a recurring task', async () => {
const timeoutMinutes = 1;
const intervalSeconds = 20;
const id = _.random(1, 20).toString();
@@ -286,6 +286,44 @@ describe('TaskManagerRunner', () => {
expect(instance.enabled).not.toBeDefined();
});
+ test('does not calculate retryAt by task instance timeout override if defined for a recurring task', async () => {
+ const timeoutMinutes = 1;
+ const timeoutOverrideSeconds = 90;
+ const intervalSeconds = 20;
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(0, 2);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: {
+ interval: `${intervalSeconds}s`,
+ },
+ timeoutOverride: `${timeoutOverrideSeconds}s`,
+ enabled: true,
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
+
+ await runner.markTaskAsRunning();
+
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
+
+ expect(instance.retryAt!.getTime()).toEqual(
+ instance.startedAt!.getTime() + timeoutMinutes * 60 * 1000
+ );
+ expect(instance.enabled).not.toBeDefined();
+ });
+
test('sets startedAt, status, attempts and retryAt when claiming a task', async () => {
const timeoutMinutes = 1;
const id = _.random(1, 20).toString();
@@ -329,6 +367,51 @@ describe('TaskManagerRunner', () => {
expect(instance.enabled).not.toBeDefined();
});
+ test('sets retryAt to task instance timeout override when defined when claiming an ad hoc task', async () => {
+ const timeoutSeconds = 60;
+ const timeoutOverrideSeconds = 90;
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(0, 2);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ enabled: true,
+ attempts: initialAttempts,
+ timeoutOverride: `${timeoutOverrideSeconds}s`,
+ schedule: undefined,
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutSeconds}s`,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
+
+ await runner.markTaskAsRunning();
+
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
+
+ expect(instance.attempts).toEqual(initialAttempts + 1);
+ expect(instance.status).toBe('running');
+ expect(instance.startedAt!.getTime()).toEqual(Date.now());
+
+ const minRunAt = Date.now();
+ const maxRunAt = minRunAt + baseDelay * Math.pow(2, initialAttempts - 1);
+ expect(instance.retryAt!.getTime()).toBeGreaterThanOrEqual(
+ minRunAt + timeoutOverrideSeconds * 1000
+ );
+ expect(instance.retryAt!.getTime()).toBeLessThanOrEqual(
+ maxRunAt + timeoutOverrideSeconds * 1000
+ );
+
+ expect(instance.enabled).not.toBeDefined();
+ });
+
test('sets retryAt when there is an error', async () => {
const id = _.random(1, 20).toString();
const initialAttempts = _.random(1, 3);
diff --git a/x-pack/plugins/task_manager/server/task_running/task_runner.ts b/x-pack/plugins/task_manager/server/task_running/task_runner.ts
index faea2bfb7e446..2c1242629952c 100644
--- a/x-pack/plugins/task_manager/server/task_running/task_runner.ts
+++ b/x-pack/plugins/task_manager/server/task_running/task_runner.ts
@@ -260,10 +260,25 @@ export class TaskManagerRunner implements TaskRunner {
// this allows us to catch tasks that remain in Pending/Finalizing without being
// cleaned up
isReadyToRun(this.instance) ? this.instance.task.startedAt : this.instance.timestamp,
- this.definition.timeout
+ this.timeout
)!;
}
+ /*
+ * Gets the timeout of the current task. Uses the timeout
+ * defined by the task type unless this is an ad-hoc task that specifies an override
+ */
+ public get timeout() {
+ if (this.instance.task.schedule) {
+ // recurring tasks should use timeout in task type
+ return this.definition.timeout;
+ }
+
+ return this.instance.task.timeoutOverride
+ ? this.instance.task.timeoutOverride
+ : this.definition.timeout;
+ }
+
/**
* Gets the duration of the current task run
*/
@@ -521,17 +536,13 @@ export class TaskManagerRunner implements TaskRunner {
attempts,
retryAt:
(this.instance.task.schedule
- ? maxIntervalFromDate(
- now,
- this.instance.task.schedule.interval,
- this.definition.timeout
- )
+ ? maxIntervalFromDate(now, this.instance.task.schedule.interval, this.timeout)
: this.getRetryDelay({
attempts,
// Fake an error. This allows retry logic when tasks keep timing out
// and lets us set a proper "retryAt" value each time.
error: new Error('Task timeout'),
- addDuration: this.definition.timeout,
+ addDuration: this.timeout,
})) ?? null,
// This is a safe conversion as we're setting the startAt above
},
diff --git a/x-pack/plugins/task_manager/server/task_validator.test.ts b/x-pack/plugins/task_manager/server/task_validator.test.ts
index 52822adf6f49f..01eb7097d15c2 100644
--- a/x-pack/plugins/task_manager/server/task_validator.test.ts
+++ b/x-pack/plugins/task_manager/server/task_validator.test.ts
@@ -322,6 +322,49 @@ describe('TaskValidator', () => {
expect(result).toEqual(task);
});
+ it(`should return the task with timeoutOverride as-is whenever schedule is not defined and override is valid`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskValidator = new TaskValidator({
+ logger: mockLogger(),
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({ timeoutOverride: '1s' });
+ const result = taskValidator.getValidatedTaskInstanceForUpdating(task);
+ expect(result).toEqual(task);
+ });
+
+ it(`should return the task with timeoutOverride stripped whenever schedule and override are defined`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskValidator = new TaskValidator({
+ logger: mockLogger(),
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({
+ schedule: { interval: '1d' },
+ timeoutOverride: '1s',
+ });
+ const { timeoutOverride, ...taskWithoutOverride } = task;
+ const result = taskValidator.getValidatedTaskInstanceForUpdating(task);
+ expect(result).toEqual(taskWithoutOverride);
+ });
+
+ it(`should return the task with timeoutOverride stripped whenever override is invalid`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskValidator = new TaskValidator({
+ logger: mockLogger(),
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({
+ timeoutOverride: 'foo',
+ });
+ const { timeoutOverride, ...taskWithoutOverride } = task;
+ const result = taskValidator.getValidatedTaskInstanceForUpdating(task);
+ expect(result).toEqual(taskWithoutOverride);
+ });
+
// TODO: Remove skip once all task types have defined their state schema.
// https://github.com/elastic/kibana/issues/159347
it.skip(`should fail to validate the state schema when the task type doesn't have stateSchemaByVersion defined`, () => {
@@ -394,4 +437,74 @@ describe('TaskValidator', () => {
).toThrowErrorMatchingInlineSnapshot(`"[bar]: definition for this key is missing"`);
});
});
+
+ describe('validateTimeoutOverride()', () => {
+ it(`should validate when specifying a valid timeout override field with no schedule`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskValidator = new TaskValidator({
+ logger: mockLogger(),
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({
+ timeoutOverride: '1s',
+ state: { foo: 'foo' },
+ });
+ expect(taskValidator.validateTimeoutOverride(task)).toEqual(task);
+ });
+
+ it(`should validate when specifying a schedule and no timeout override`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskValidator = new TaskValidator({
+ logger: mockLogger(),
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({
+ schedule: { interval: '1d' },
+ state: { foo: 'foo' },
+ });
+ expect(taskValidator.validateTimeoutOverride(task)).toEqual(task);
+ });
+
+ it(`should fail to validate when specifying a valid timeout override field and recurring schedule`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const logger = mockLogger();
+ const taskValidator = new TaskValidator({
+ logger,
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({
+ timeoutOverride: '1s',
+ schedule: { interval: '1d' },
+ state: { foo: 'foo' },
+ });
+
+ const { timeoutOverride, ...taskWithoutOverride } = task;
+ expect(taskValidator.validateTimeoutOverride(task)).toEqual(taskWithoutOverride);
+ expect(logger.warn).toHaveBeenCalledWith(
+ `[TaskValidator] cannot specify timeout override 1s when scheduling a recurring task`
+ );
+ });
+
+ it(`should fail to validate when specifying an invalid timeout override field`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const logger = mockLogger();
+ const taskValidator = new TaskValidator({
+ logger,
+ definitions,
+ allowReadingInvalidState: false,
+ });
+ const task = taskManagerMock.createTask({
+ timeoutOverride: 'foo',
+ state: { foo: 'foo' },
+ });
+ const { timeoutOverride, ...taskWithoutOverride } = task;
+ expect(taskValidator.validateTimeoutOverride(task)).toEqual(taskWithoutOverride);
+ expect(logger.warn).toHaveBeenCalledWith(
+ `[TaskValidator] Invalid timeout override "foo". Timeout must be of the form "{number}{cadence}" where number is an integer. Example: 5m. This timeout override will be ignored.`
+ );
+ });
+ });
});
diff --git a/x-pack/plugins/task_manager/server/task_validator.ts b/x-pack/plugins/task_manager/server/task_validator.ts
index 61d9a903dd5b4..ddc7304e4e31e 100644
--- a/x-pack/plugins/task_manager/server/task_validator.ts
+++ b/x-pack/plugins/task_manager/server/task_validator.ts
@@ -5,11 +5,13 @@
* 2.0.
*/
-import { max, memoize } from 'lodash';
+import { max, memoize, omit } from 'lodash';
import type { Logger } from '@kbn/core/server';
import type { ObjectType } from '@kbn/config-schema';
import { TaskTypeDictionary } from './task_type_dictionary';
import type { TaskInstance, ConcreteTaskInstance, TaskDefinition } from './task';
+import { isInterval, parseIntervalAsMillisecond } from './lib/intervals';
+import { isErr, tryAsResult } from './lib/result_type';
interface TaskValidatorOpts {
allowReadingInvalidState: boolean;
@@ -98,34 +100,67 @@ export class TaskValidator {
task: T,
options: { validate: boolean } = { validate: true }
): T {
+ const taskWithValidatedTimeout = this.validateTimeoutOverride(task);
+
if (!options.validate) {
- return task;
+ return taskWithValidatedTimeout;
}
// In the scenario the task is unused / deprecated and Kibana needs to manipulate the task,
// we'll do a pass-through for those
- if (!this.definitions.has(task.taskType)) {
- return task;
+ if (!this.definitions.has(taskWithValidatedTimeout.taskType)) {
+ return taskWithValidatedTimeout;
}
- const taskTypeDef = this.definitions.get(task.taskType);
+ const taskTypeDef = this.definitions.get(taskWithValidatedTimeout.taskType);
const latestStateSchema = this.cachedGetLatestStateSchema(taskTypeDef);
// TODO: Remove once all task types have defined their state schema.
// https://github.com/elastic/kibana/issues/159347
// Otherwise, failures on read / write would occur. (don't forget to unskip test)
if (!latestStateSchema) {
- return task;
+ return taskWithValidatedTimeout;
}
// We are doing a write operation which must validate against the latest state schema
return {
- ...task,
- state: this.getValidatedStateSchema(task.state, task.taskType, latestStateSchema, 'forbid'),
+ ...taskWithValidatedTimeout,
+ state: this.getValidatedStateSchema(
+ taskWithValidatedTimeout.state,
+ taskWithValidatedTimeout.taskType,
+ latestStateSchema,
+ 'forbid'
+ ),
stateVersion: latestStateSchema?.version,
};
}
+ public validateTimeoutOverride(task: T): T {
+ if (task.timeoutOverride) {
+ if (
+ !isInterval(task.timeoutOverride) ||
+ isErr(tryAsResult(() => parseIntervalAsMillisecond(task.timeoutOverride!)))
+ ) {
+ this.logger.warn(
+ `[TaskValidator] Invalid timeout override "${task.timeoutOverride}". Timeout must be of the form "{number}{cadence}" where number is an integer. Example: 5m. This timeout override will be ignored.`
+ );
+
+ return omit(task, 'timeoutOverride') as T;
+ }
+ }
+
+ // Only allow timeoutOverride if schedule is not defined
+ if (!!task.timeoutOverride && !!task.schedule) {
+ this.logger.warn(
+ `[TaskValidator] cannot specify timeout override ${task.timeoutOverride} when scheduling a recurring task`
+ );
+
+ return omit(task, 'timeoutOverride') as T;
+ }
+
+ return task;
+ }
+
private migrateTaskState(
state: ConcreteTaskInstance['state'],
currentVersion: number | undefined,
diff --git a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts
index 01f8cd6ba4bc9..b1c8d6dd028b2 100644
--- a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts
+++ b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts
@@ -62,6 +62,7 @@ export function initRoutes(
params: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
state: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
id: schema.maybe(schema.string()),
+ timeoutOverride: schema.maybe(schema.string()),
}),
}),
},
diff --git a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts
index 0e3a2bb993fe7..52c747d0ec786 100644
--- a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts
+++ b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts
@@ -167,6 +167,37 @@ export class SampleTaskManagerFixturePlugin
},
}),
},
+ sampleAdHocTaskTimingOut: {
+ title: 'Sample Ad-Hoc Task that Times Out',
+ description: 'A sample task that times out.',
+ maxAttempts: 3,
+ timeout: '1s',
+ createTaskRunner: ({ taskInstance }: { taskInstance: ConcreteTaskInstance }) => {
+ let isCancelled: boolean = false;
+ return {
+ async run() {
+ // wait for 15 seconds
+ await new Promise((r) => setTimeout(r, 15000));
+
+ if (!isCancelled) {
+ const [{ elasticsearch }] = await core.getStartServices();
+ await elasticsearch.client.asInternalUser.index({
+ index: '.kibana_task_manager_test_result',
+ body: {
+ type: 'task',
+ taskType: 'sampleAdHocTaskTimingOut',
+ taskId: taskInstance.id,
+ },
+ refresh: true,
+ });
+ }
+ },
+ async cancel() {
+ isCancelled = true;
+ },
+ };
+ },
+ },
sampleRecurringTaskWhichHangs: {
title: 'Sample Recurring Task that Hangs for a minute',
description: 'A sample task that Hangs for a minute on each run.',
diff --git a/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts b/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts
index 3b71c99df2e5b..e5d5e8fbd6fb7 100644
--- a/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts
+++ b/x-pack/test/plugin_api_integration/test_suites/task_manager/check_registered_task_types.ts
@@ -20,6 +20,7 @@ export default function ({ getService }: FtrProviderContext) {
}
const TEST_TYPES = [
+ 'sampleAdHocTaskTimingOut',
'lowPriorityTask',
'sampleOneTimeTaskThrowingError',
'sampleRecurringTaskTimingOut',
diff --git a/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts b/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts
index f897e0fa05035..a430993933c8b 100644
--- a/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts
+++ b/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts
@@ -791,6 +791,43 @@ export default function ({ getService }: FtrProviderContext) {
});
});
+ it('should fail to schedule recurring task with timeout override', async () => {
+ const task = await scheduleTask({
+ taskType: 'sampleRecurringTaskTimingOut',
+ schedule: { interval: '1s' },
+ timeoutOverride: '30s',
+ params: {},
+ });
+
+ expect(task.timeoutOverride).to.be(undefined);
+ });
+
+ it('should allow timeout override for ad hoc tasks', async () => {
+ const task = await scheduleTask({
+ taskType: 'sampleAdHocTaskTimingOut',
+ timeoutOverride: '30s',
+ params: {},
+ });
+
+ expect(task.timeoutOverride).to.be('30s');
+
+ // this task type is set to time out after 1s but the task runner
+ // will wait 15 seconds and then index a document if it hasn't timed out
+ // this test overrides the timeout to 30s and checks if the expected
+ // document was indexed. presence of indexed document means the task
+ // timeout override was respected
+ await retry.try(async () => {
+ const [scheduledTask] = (await currentTasks()).docs;
+ expect(scheduledTask?.id).to.eql(task.id);
+ });
+
+ await retry.try(async () => {
+ const docs: RawDoc[] = await historyDocs(task.id);
+ expect(docs.length).to.eql(1);
+ expect(docs[0]._source.taskType).to.eql('sampleAdHocTaskTimingOut');
+ });
+ });
+
it('should bulk update schedules for multiple tasks', async () => {
const initialTime = Date.now();
const tasks = await Promise.all([
From 19cc3797d36b08bc96fbaa21ece34f037add8559 Mon Sep 17 00:00:00 2001
From: Davis McPhee
Date: Thu, 15 Feb 2024 14:28:55 -0400
Subject: [PATCH 18/23] Unskip flaky test from #167643 (#176942)
## Summary
This PR unskips the tests skipped in #167643.
Flaky test runner x50:
https://buildkite.com/elastic/kibana-flaky-test-suite-runner/builds/5186.
Resolves #167643.
### Checklist
- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [x] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [ ] Any UI touched in this PR is usable by keyboard only (learn more
about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [ ] Any UI touched in this PR does not create any new axe failures
(run axe in browser:
[FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/),
[Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [ ] If a plugin configuration key changed, check if it needs to be
allowlisted in the cloud and added to the [docker
list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This renders correctly on smaller devices using a responsive
layout. (You can test this [in your
browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [ ] This was checked for [cross-browser
compatibility](https://www.elastic.co/support/matrix#matrix_browsers)
### For maintainers
- [ ] This was checked for breaking API changes and was [labeled
appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
Co-authored-by: Julia Rechkunova
---
.../test_suites/common/examples/partial_results/index.ts | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/x-pack/test_serverless/functional/test_suites/common/examples/partial_results/index.ts b/x-pack/test_serverless/functional/test_suites/common/examples/partial_results/index.ts
index c5a5bb2995217..415174adf0d0a 100644
--- a/x-pack/test_serverless/functional/test_suites/common/examples/partial_results/index.ts
+++ b/x-pack/test_serverless/functional/test_suites/common/examples/partial_results/index.ts
@@ -12,8 +12,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const testSubjects = getService('testSubjects');
const PageObjects = getPageObjects(['common', 'svlCommonPage']);
- // FLAKY: https://github.com/elastic/kibana/issues/167643
- describe.skip('Partial Results Example', function () {
+ describe('Partial Results Example', function () {
before(async () => {
await PageObjects.svlCommonPage.loginAsAdmin();
await PageObjects.common.navigateToApp('partialResultsExample');
From b4836ace2fb3d6c38df24294732b09be8f713d9f Mon Sep 17 00:00:00 2001
From: Davis McPhee
Date: Thu, 15 Feb 2024 14:29:23 -0400
Subject: [PATCH 19/23] Unskip flaky test from #175579 (#176981)
## Summary
This PR unskips the tests skipped in #175579.
Flaky test runner x50:
https://buildkite.com/elastic/kibana-flaky-test-suite-runner/builds/5194.
Resolves #175579.
### Checklist
- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [x] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [ ] Any UI touched in this PR is usable by keyboard only (learn more
about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [ ] Any UI touched in this PR does not create any new axe failures
(run axe in browser:
[FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/),
[Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [ ] If a plugin configuration key changed, check if it needs to be
allowlisted in the cloud and added to the [docker
list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This renders correctly on smaller devices using a responsive
layout. (You can test this [in your
browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [ ] This was checked for [cross-browser
compatibility](https://www.elastic.co/support/matrix#matrix_browsers)
### For maintainers
- [ ] This was checked for breaking API changes and was [labeled
appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
Co-authored-by: Julia Rechkunova
---
.../test/examples/search_examples/partial_results_example.ts | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/x-pack/test/examples/search_examples/partial_results_example.ts b/x-pack/test/examples/search_examples/partial_results_example.ts
index 83eb900ef0ff4..269b2e79ab38f 100644
--- a/x-pack/test/examples/search_examples/partial_results_example.ts
+++ b/x-pack/test/examples/search_examples/partial_results_example.ts
@@ -14,8 +14,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const PageObjects = getPageObjects(['common']);
const retry = getService('retry');
- // FLAKY: https://github.com/elastic/kibana/issues/175579
- describe.skip('Partial results example', () => {
+ describe('Partial results example', () => {
before(async () => {
await PageObjects.common.navigateToApp('searchExamples');
await testSubjects.click('/search');
From e136a9318215d5913a5e957aec5d9ad0b8132506 Mon Sep 17 00:00:00 2001
From: Zacqary Adam Xeper
Date: Thu, 15 Feb 2024 12:30:08 -0600
Subject: [PATCH 20/23] [RAM] Fix bug where select all rules bypasses filters
(#176962)
## Summary
Fixes #176867
A bug introduced in https://github.com/elastic/kibana/pull/174954
bypassed most filters when using Select All on the Rules List. This was
because the filter property names changed and no longer matched what the
`useBulkEditSelect` hook was expecting. Because all of these props were
optional, the mismatch didn't trigger any type errors.
This PR syncs up the type definitions with the new filter store hook and
adds a functional test to make sure filters are applied to bulk actions
when clicking the select all button.
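A rough before/after sketch of the prop shape change, based on the updated unit test below (the `renderHook` / `items` setup and the filter values are placeholder examples):
```ts
// Before: each filter was its own optional prop, so the renamed properties
// arrived as `undefined` and Select All ignored most filters.
renderHook(() =>
  useBulkEditSelect({ items, totalItemCount: 4, tagsFilter: ['team:a'], searchText: 'cpu*' })
);

// After: the whole RulesListFilters object from the filter store is passed
// through, so the hook and the rules list can no longer drift apart.
renderHook(() =>
  useBulkEditSelect({ items, totalItemCount: 4, filters: { tags: ['team:a'], searchText: 'cpu*' } })
);
```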
### Checklist
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
---
.../hooks/use_bulk_edit_select.test.tsx | 15 ++--
.../hooks/use_bulk_edit_select.tsx | 55 ++++--------
.../rules_list/components/rules_list.tsx | 3 +-
.../rules_list/bulk_actions.ts | 87 ++++++++++++++++++-
4 files changed, 111 insertions(+), 49 deletions(-)
diff --git a/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.test.tsx b/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.test.tsx
index 07bf3516fbdef..1a5e3f278eb5e 100644
--- a/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.test.tsx
+++ b/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.test.tsx
@@ -45,8 +45,7 @@ describe('useBulkEditSelectTest', () => {
useBulkEditSelect({
items,
totalItemCount: 4,
- tagsFilter: ['test: 123'],
- searchText: 'rules*',
+ filters: { tags: ['test: 123'], searchText: 'rules*' },
})
);
@@ -58,8 +57,7 @@ describe('useBulkEditSelectTest', () => {
useBulkEditSelect({
items,
totalItemCount: 4,
- tagsFilter: ['test: 123'],
- searchText: 'rules*',
+ filters: { tags: ['test: 123'], searchText: 'rules*' },
})
);
@@ -107,8 +105,7 @@ describe('useBulkEditSelectTest', () => {
useBulkEditSelect({
items,
totalItemCount: 4,
- tagsFilter: ['test: 123'],
- searchText: 'rules*',
+ filters: { tags: ['test: 123'], searchText: 'rules*' },
})
);
@@ -124,8 +121,10 @@ describe('useBulkEditSelectTest', () => {
useBulkEditSelect({
items,
totalItemCount: 4,
- tagsFilter: ['test: 123'],
- searchText: 'rules*',
+ filters: {
+ tags: ['test: 123'],
+ searchText: 'rules*',
+ },
})
);
diff --git a/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.tsx b/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.tsx
index 84e762cbe93f8..fe70b4fa0e3bc 100644
--- a/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.tsx
+++ b/x-pack/plugins/triggers_actions_ui/public/application/hooks/use_bulk_edit_select.tsx
@@ -7,7 +7,7 @@
import { useReducer, useMemo, useCallback } from 'react';
import { fromKueryExpression, nodeBuilder } from '@kbn/es-query';
import { mapFiltersToKueryNode } from '../lib/rule_api/map_filters_to_kuery_node';
-import { RuleTableItem, RuleStatus } from '../../types';
+import { RuleTableItem, RulesListFilters } from '../../types';
interface BulkEditSelectionState {
selectedIds: Set;
@@ -73,27 +73,11 @@ const reducer = (state: BulkEditSelectionState, action: Action) => {
interface UseBulkEditSelectProps {
totalItemCount: number;
items: RuleTableItem[];
- typesFilter?: string[];
- actionTypesFilter?: string[];
- tagsFilter?: string[];
- ruleExecutionStatusesFilter?: string[];
- ruleLastRunOutcomesFilter?: string[];
- ruleStatusesFilter?: RuleStatus[];
- searchText?: string;
+ filters?: RulesListFilters;
}
export function useBulkEditSelect(props: UseBulkEditSelectProps) {
- const {
- totalItemCount = 0,
- items = [],
- typesFilter,
- actionTypesFilter,
- tagsFilter,
- ruleExecutionStatusesFilter,
- ruleLastRunOutcomesFilter,
- ruleStatusesFilter,
- searchText,
- } = props;
+ const { totalItemCount = 0, items = [], filters } = props;
const [state, dispatch] = useReducer(reducer, {
...initialState,
@@ -187,15 +171,20 @@ export function useBulkEditSelect(props: UseBulkEditSelectProps) {
const getFilterKueryNode = useCallback(
(idsToExclude?: string[]) => {
- const ruleFilterKueryNode = mapFiltersToKueryNode({
- typesFilter,
- actionTypesFilter,
- tagsFilter,
- ruleExecutionStatusesFilter,
- ruleLastRunOutcomesFilter,
- ruleStatusesFilter,
- searchText,
- });
+ const ruleFilterKueryNode = mapFiltersToKueryNode(
+ filters
+ ? {
+ typesFilter: filters.types,
+ actionTypesFilter: filters.actionTypes,
+ tagsFilter: filters.tags,
+ ruleExecutionStatusesFilter: filters.ruleExecutionStatuses,
+ ruleLastRunOutcomesFilter: filters.ruleLastRunOutcomes,
+ ruleParamsFilter: filters.ruleParams,
+ ruleStatusesFilter: filters.ruleStatuses,
+ searchText: filters.searchText,
+ }
+ : {}
+ );
if (idsToExclude && idsToExclude.length) {
const excludeFilter = fromKueryExpression(
@@ -209,15 +198,7 @@ export function useBulkEditSelect(props: UseBulkEditSelectProps) {
return ruleFilterKueryNode;
},
- [
- typesFilter,
- actionTypesFilter,
- tagsFilter,
- ruleExecutionStatusesFilter,
- ruleLastRunOutcomesFilter,
- ruleStatusesFilter,
- searchText,
- ]
+ [filters]
);
const getFilter = useCallback(() => {
diff --git a/x-pack/plugins/triggers_actions_ui/public/application/sections/rules_list/components/rules_list.tsx b/x-pack/plugins/triggers_actions_ui/public/application/sections/rules_list/components/rules_list.tsx
index 9b66a65949674..b381035fbfc61 100644
--- a/x-pack/plugins/triggers_actions_ui/public/application/sections/rules_list/components/rules_list.tsx
+++ b/x-pack/plugins/triggers_actions_ui/public/application/sections/rules_list/components/rules_list.tsx
@@ -363,8 +363,7 @@ export const RulesList = ({
} = useBulkEditSelect({
totalItemCount: rulesState.totalItemCount,
items: tableItems,
- ...filters,
- typesFilter: rulesTypesFilter,
+ filters: { ...filters, types: rulesTypesFilter },
});
const handleUpdateFiltersEffect = useCallback(
diff --git a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/rules_list/bulk_actions.ts b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/rules_list/bulk_actions.ts
index 4c545e20cb687..c8cf8903a4275 100644
--- a/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/rules_list/bulk_actions.ts
+++ b/x-pack/test/functional_with_es_ssl/apps/triggers_actions_ui/rules_list/bulk_actions.ts
@@ -14,7 +14,12 @@
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../../ftr_provider_context';
-import { createAlert, scheduleRule, snoozeAlert } from '../../../lib/alert_api_actions';
+import {
+ createAlert,
+ createAlertManualCleanup,
+ scheduleRule,
+ snoozeAlert,
+} from '../../../lib/alert_api_actions';
import { ObjectRemover } from '../../../lib/object_remover';
export default ({ getPageObjects, getService }: FtrProviderContext) => {
@@ -30,7 +35,7 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
await testSubjects.click('rulesTab');
}
- describe('rules list', () => {
+ describe('rules list bulk actions', () => {
before(async () => {
await pageObjects.common.navigateToApp('triggersActions');
await testSubjects.click('rulesTab');
@@ -206,5 +211,83 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
expect(toastTitle).to.eql('Updated API key for 1 rule.');
});
});
+
+ it('should apply filters to bulk actions when using the select all button', async () => {
+ const rule1 = await createAlert({
+ supertest,
+ objectRemover,
+ overwrites: { name: 'a' },
+ });
+ const rule2 = await createAlertManualCleanup({
+ supertest,
+ overwrites: { name: 'b', rule_type_id: 'test.always-firing' },
+ });
+ const rule3 = await createAlert({
+ supertest,
+ objectRemover,
+ overwrites: { name: 'c' },
+ });
+
+ await refreshAlertsList();
+ expect(await testSubjects.getVisibleText('totalRulesCount')).to.be('3 rules');
+
+ await testSubjects.click('ruleTypeFilterButton');
+ await testSubjects.existOrFail('ruleTypetest.noopFilterOption');
+ await testSubjects.click('ruleTypetest.noopFilterOption');
+ await testSubjects.click(`checkboxSelectRow-${rule1.id}`);
+ await testSubjects.click('selectAllRulesButton');
+
+ await testSubjects.click('showBulkActionButton');
+ await testSubjects.click('bulkDisable');
+ await testSubjects.existOrFail('untrackAlertsModal');
+ await testSubjects.click('confirmModalConfirmButton');
+
+ await retry.try(async () => {
+ const toastTitle = await toasts.getTitleAndDismiss();
+ expect(toastTitle).to.eql('Disabled 2 rules');
+ });
+
+ await testSubjects.click('rules-list-clear-filter');
+ await refreshAlertsList();
+
+ await pageObjects.triggersActionsUI.searchAlerts(rule1.name);
+ expect(await testSubjects.getVisibleText('statusDropdown')).to.be('Disabled');
+ await pageObjects.triggersActionsUI.searchAlerts(rule2.name);
+ expect(await testSubjects.getVisibleText('statusDropdown')).to.be('Enabled');
+ await pageObjects.triggersActionsUI.searchAlerts(rule3.name);
+ expect(await testSubjects.getVisibleText('statusDropdown')).to.be('Disabled');
+
+ await testSubjects.click('rules-list-clear-filter');
+ await refreshAlertsList();
+
+ await testSubjects.click('ruleStatusFilterButton');
+ await testSubjects.existOrFail('ruleStatusFilterOption-enabled');
+ await testSubjects.click('ruleStatusFilterOption-enabled');
+ await testSubjects.click(`checkboxSelectRow-${rule2.id}`);
+ await testSubjects.click('selectAllRulesButton');
+
+ await testSubjects.click('showBulkActionButton');
+ await testSubjects.click('bulkDelete');
+ await testSubjects.existOrFail('rulesDeleteConfirmation');
+ await testSubjects.click('confirmModalConfirmButton');
+
+ await retry.try(async () => {
+ const toastTitle = await toasts.getTitleAndDismiss();
+ expect(toastTitle).to.eql('Deleted 1 rule');
+ });
+
+ await testSubjects.click('rules-list-clear-filter');
+ await refreshAlertsList();
+
+ await retry.try(
+ async () => {
+ expect(await testSubjects.getVisibleText('totalRulesCount')).to.be('2 rules');
+ },
+ async () => {
+ // If the delete fails, make sure rule2 gets cleaned up
+ objectRemover.add(rule2.id, 'alert', 'alerts');
+ }
+ );
+ });
});
};
From 8e7e5b23ec4f3b148c6a6e33f5b157d6266b710b Mon Sep 17 00:00:00 2001
From: Ersin Erdal <92688503+ersin-erdal@users.noreply.github.com>
Date: Thu, 15 Feb 2024 20:54:05 +0100
Subject: [PATCH 21/23] Revert changes in PR#175981 (#177054)
Reverts changes in https://github.com/elastic/kibana/pull/175981
---
.../rules_client/methods/get_alert_state.ts | 29 +++------
.../tests/get_alert_state.test.ts | 65 -------------------
2 files changed, 9 insertions(+), 85 deletions(-)
diff --git a/x-pack/plugins/alerting/server/rules_client/methods/get_alert_state.ts b/x-pack/plugins/alerting/server/rules_client/methods/get_alert_state.ts
index 4da913a06fe79..6497428e1c2f2 100644
--- a/x-pack/plugins/alerting/server/rules_client/methods/get_alert_state.ts
+++ b/x-pack/plugins/alerting/server/rules_client/methods/get_alert_state.ts
@@ -5,7 +5,6 @@
* 2.0.
*/
-import { SavedObjectsErrorHelpers } from '@kbn/core-saved-objects-server';
import { RuleTaskState } from '../../types';
import { taskInstanceToAlertTaskInstance } from '../../task_runner/alert_task_instance';
import { ReadOperations, AlertingAuthorizationEntity } from '../../authorization';
@@ -19,28 +18,18 @@ export async function getAlertState(
context: RulesClientContext,
{ id }: GetAlertStateParams
): Promise {
- const rule = await get(context, { id });
+ const alert = await get(context, { id });
await context.authorization.ensureAuthorized({
- ruleTypeId: rule.alertTypeId,
- consumer: rule.consumer,
+ ruleTypeId: alert.alertTypeId,
+ consumer: alert.consumer,
operation: ReadOperations.GetRuleState,
entity: AlertingAuthorizationEntity.Rule,
});
- if (rule.scheduledTaskId) {
- try {
- const { state } = taskInstanceToAlertTaskInstance(
- await context.taskManager.get(rule.scheduledTaskId),
- rule
- );
- return state;
- } catch (e) {
- if (SavedObjectsErrorHelpers.isNotFoundError(e)) {
- context.logger.warn(`Task (${rule.scheduledTaskId}) not found`);
- } else {
- context.logger.warn(
- `An error occurred when getting the task state for (${rule.scheduledTaskId})`
- );
- }
- }
+ if (alert.scheduledTaskId) {
+ const { state } = taskInstanceToAlertTaskInstance(
+ await context.taskManager.get(alert.scheduledTaskId),
+ alert
+ );
+ return state;
}
}
diff --git a/x-pack/plugins/alerting/server/rules_client/tests/get_alert_state.test.ts b/x-pack/plugins/alerting/server/rules_client/tests/get_alert_state.test.ts
index 951e85c023523..63d86843512c0 100644
--- a/x-pack/plugins/alerting/server/rules_client/tests/get_alert_state.test.ts
+++ b/x-pack/plugins/alerting/server/rules_client/tests/get_alert_state.test.ts
@@ -22,7 +22,6 @@ import { AlertingAuthorization } from '../../authorization/alerting_authorizatio
import { ActionsAuthorization } from '@kbn/actions-plugin/server';
import { getBeforeSetup } from './lib';
import { RULE_SAVED_OBJECT_TYPE } from '../../saved_objects';
-import { SavedObjectsErrorHelpers } from '@kbn/core-saved-objects-server';
const taskManager = taskManagerMock.createStart();
const ruleTypeRegistry = ruleTypeRegistryMock.create();
@@ -176,70 +175,6 @@ describe('getAlertState()', () => {
expect(taskManager.get).toHaveBeenCalledWith(scheduledTaskId);
});
- test('logs a warning if the task not found', async () => {
- const rulesClient = new RulesClient(rulesClientParams);
-
- const scheduledTaskId = 'task-123';
-
- unsecuredSavedObjectsClient.get.mockResolvedValueOnce({
- id: '1',
- type: RULE_SAVED_OBJECT_TYPE,
- attributes: {
- alertTypeId: '123',
- schedule: { interval: '10s' },
- params: {
- bar: true,
- },
- actions: [],
- enabled: true,
- scheduledTaskId,
- mutedInstanceIds: [],
- muteAll: true,
- },
- references: [],
- });
-
- taskManager.get.mockRejectedValueOnce(SavedObjectsErrorHelpers.createGenericNotFoundError());
-
- await rulesClient.getAlertState({ id: '1' });
-
- expect(rulesClientParams.logger.warn).toHaveBeenCalledTimes(1);
- expect(rulesClientParams.logger.warn).toHaveBeenCalledWith('Task (task-123) not found');
- });
-
- test('logs a warning if the taskManager throws an error', async () => {
- const rulesClient = new RulesClient(rulesClientParams);
-
- const scheduledTaskId = 'task-123';
-
- unsecuredSavedObjectsClient.get.mockResolvedValueOnce({
- id: '1',
- type: RULE_SAVED_OBJECT_TYPE,
- attributes: {
- alertTypeId: '123',
- schedule: { interval: '10s' },
- params: {
- bar: true,
- },
- actions: [],
- enabled: true,
- scheduledTaskId,
- mutedInstanceIds: [],
- muteAll: true,
- },
- references: [],
- });
-
- taskManager.get.mockRejectedValueOnce(SavedObjectsErrorHelpers.createBadRequestError());
-
- await rulesClient.getAlertState({ id: '1' });
-
- expect(rulesClientParams.logger.warn).toHaveBeenCalledTimes(1);
- expect(rulesClientParams.logger.warn).toHaveBeenCalledWith(
- 'An error occurred when getting the task state for (task-123)'
- );
- });
-
describe('authorization', () => {
beforeEach(() => {
unsecuredSavedObjectsClient.get.mockResolvedValueOnce({
From d2f566970c22220ba0130ae9446078d30ec8c995 Mon Sep 17 00:00:00 2001
From: Davis McPhee
Date: Thu, 15 Feb 2024 16:08:13 -0400
Subject: [PATCH 22/23] Fix flaky test from #172781 (#176972)
## Summary
This PR fixes the flaky test skipped in #172781.
Flaky test runner x95:
https://buildkite.com/elastic/kibana-flaky-test-suite-runner/builds/5187.
Resolves #172781.
### Checklist
- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [x] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [ ] Any UI touched in this PR is usable by keyboard only (learn more
about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [ ] Any UI touched in this PR does not create any new axe failures
(run axe in browser:
[FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/),
[Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [ ] If a plugin configuration key changed, check if it needs to be
allowlisted in the cloud and added to the [docker
list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This renders correctly on smaller devices using a responsive
layout. (You can test this [in your
browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [ ] This was checked for [cross-browser
compatibility](https://www.elastic.co/support/matrix#matrix_browsers)
### For maintainers
- [ ] This was checked for breaking API changes and was [labeled
appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
---
.../existing_fields.ts | 2 +-
.../page_objects/unified_field_list.ts | 16 ++++++++++++++++
.../existing_fields.ts | 5 ++---
3 files changed, 19 insertions(+), 4 deletions(-)
diff --git a/test/examples/unified_field_list_examples/existing_fields.ts b/test/examples/unified_field_list_examples/existing_fields.ts
index 341c440b3c8a8..2967e877383f1 100644
--- a/test/examples/unified_field_list_examples/existing_fields.ts
+++ b/test/examples/unified_field_list_examples/existing_fields.ts
@@ -73,7 +73,7 @@ export default ({ getService, getPageObjects }: FtrProviderContext) => {
await PageObjects.timePicker.setAbsoluteRange(TEST_START_TIME, TEST_END_TIME);
await PageObjects.header.waitUntilLoadingHasFinished();
await PageObjects.unifiedFieldList.waitUntilSidebarHasLoaded();
- await PageObjects.unifiedFieldList.toggleSidebarSection('meta');
+ await PageObjects.unifiedFieldList.openSidebarSection('meta');
});
after(async () => {
diff --git a/test/functional/page_objects/unified_field_list.ts b/test/functional/page_objects/unified_field_list.ts
index 5e2d1039d7697..378b20cc02e24 100644
--- a/test/functional/page_objects/unified_field_list.ts
+++ b/test/functional/page_objects/unified_field_list.ts
@@ -88,6 +88,22 @@ export class UnifiedFieldListPageObject extends FtrService {
);
}
+ public async openSidebarSection(sectionName: SidebarSectionName) {
+ const openedSectionSelector = `${this.getSidebarSectionSelector(
+ sectionName,
+ true
+ )}.euiAccordion-isOpen`;
+
+ if (await this.find.existsByCssSelector(openedSectionSelector)) {
+ return;
+ }
+
+ await this.retry.waitFor(`${sectionName} fields section to open`, async () => {
+ await this.toggleSidebarSection(sectionName);
+ return await this.find.existsByCssSelector(openedSectionSelector);
+ });
+ }
+
public async waitUntilFieldPopoverIsOpen() {
await this.retry.waitFor('popover is open', async () => {
return Boolean(await this.find.byCssSelector('[data-popover-open="true"]'));
diff --git a/x-pack/test_serverless/functional/test_suites/common/examples/unified_field_list_examples/existing_fields.ts b/x-pack/test_serverless/functional/test_suites/common/examples/unified_field_list_examples/existing_fields.ts
index 4725b39bd9db1..05400dbac1b38 100644
--- a/x-pack/test_serverless/functional/test_suites/common/examples/unified_field_list_examples/existing_fields.ts
+++ b/x-pack/test_serverless/functional/test_suites/common/examples/unified_field_list_examples/existing_fields.ts
@@ -78,7 +78,7 @@ export default ({ getService, getPageObjects }: FtrProviderContext) => {
await PageObjects.timePicker.setAbsoluteRange(TEST_START_TIME, TEST_END_TIME);
await PageObjects.header.waitUntilLoadingHasFinished();
await PageObjects.unifiedFieldList.waitUntilSidebarHasLoaded();
- await PageObjects.unifiedFieldList.toggleSidebarSection('meta');
+ await PageObjects.unifiedFieldList.openSidebarSection('meta');
});
after(async () => {
@@ -92,8 +92,7 @@ export default ({ getService, getPageObjects }: FtrProviderContext) => {
await kibanaServer.savedObjects.cleanStandardList();
});
- // FLAKY: https://github.com/elastic/kibana/issues/172781
- describe.skip('existence', () => {
+ describe('existence', () => {
it('should find which fields exist in the sample documents', async () => {
const sidebarFields = await PageObjects.unifiedFieldList.getAllFieldNames();
expect(sidebarFields.sort()).to.eql([...metaFields, ...fieldsWithData].sort());
From cec949ed0abfe4cd2a9f74d55c0d3212e96db328 Mon Sep 17 00:00:00 2001
From: "Christiane (Tina) Heiligers"
Date: Thu, 15 Feb 2024 13:09:11 -0700
Subject: [PATCH 23/23] Deprecates SavedObjectType migrations and schemas in
favor of modelVersions (#176970)
Fixes https://github.com/elastic/kibana/issues/176776
Saved object `modelVersions` support backwards compatibility (BWC) and
zero-downtime upgrades (ZDT) and replace the `SavedObjectsType.schemas`
and `SavedObjectsType.migrations` properties. This PR marks these two
properties as deprecated.
The public-facing docs have also been updated to the "new" way of
implementing saved object changes.
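For quick orientation, a condensed sketch adapted from the use-case examples added below (the type name, field, and version are illustrative):
```ts
import { schema } from '@kbn/config-schema';
import type { SavedObjectsType } from '@kbn/core/server';

// Condensed from the new use-case docs: register changes via `modelVersions`
// instead of the deprecated `migrations` / `schemas` properties.
const myType: SavedObjectsType = {
  name: 'test',
  namespaceType: 'single',
  switchToModelVersionAt: '8.10.0',
  modelVersions: {
    1: {
      changes: [],
      schemas: {
        // evicts unknown fields when an older node reads a newer document
        forwardCompatibility: schema.object({ foo: schema.string() }, { unknowns: 'ignore' }),
        // validates input during `create` and `bulkCreate`
        create: schema.object({ foo: schema.string() }),
      },
    },
  },
  mappings: {
    properties: {
      foo: { type: 'text' },
    },
  },
};
```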
### Checklist
Delete any items that are not applicable to this PR.
- [x] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [x]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
---
...objects-service-use-case-examples.asciidoc | 761 ++++++++++++++++++
.../core/saved-objects-service.asciidoc | 513 +++++++-----
.../src/saved_objects_type.ts | 4 +-
3 files changed, 1068 insertions(+), 210 deletions(-)
create mode 100644 docs/developer/architecture/core/saved-objects-service-use-case-examples.asciidoc
diff --git a/docs/developer/architecture/core/saved-objects-service-use-case-examples.asciidoc b/docs/developer/architecture/core/saved-objects-service-use-case-examples.asciidoc
new file mode 100644
index 0000000000000..2b2cbde0b3f1a
--- /dev/null
+++ b/docs/developer/architecture/core/saved-objects-service-use-case-examples.asciidoc
@@ -0,0 +1,761 @@
+[[saved-objects-service-use-case-examples]]
+=== Use-case examples
+
+These are examples of the migration scenarios currently supported (out of
+the box) by the system.
+
+*note:* _more complex scenarios (e.g. field mutation by copy/sync) could
+already be implemented, but without the proper tooling exposed from
+Core, most of the work related to sync and compatibility would have to
+be implemented in the domain layer of the type owners, which is why
+we’re not documenting them yet._
+
+==== Adding a non-indexed field without default value
+
+We are currently in model version 1, and our type has 2 indexed fields
+defined: `foo` and `bar`.
+
+The definition of the type at version 1 would look like:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ // initial (and current) model version
+ 1: {
+ changes: [],
+ schemas: {
+ // FC schema defining the known fields (indexed or not) for this version
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ { unknowns: 'ignore' } // note the `unknown: ignore` which is how we're evicting the unknown fields
+ ),
+ // schema that will be used to validate input during `create` and `bulkCreate`
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ )
+ },
+ },
+ },
+ mappings: {
+ properties: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ },
+ },
+};
+----
+
+From here, say we want to introduce a new `dolly` field that is not
+indexed, and that we don’t need to populate with a default value.
+
+To achieve that, we need to introduce a new model version; the only
+thing to do is to define the associated schemas to include this new
+field.
+
+The added model version would look like:
+
+[source,ts]
+----
+// the new model version adding the `dolly` field
+let modelVersion2: SavedObjectsModelVersion = {
+ // not an indexed field, no data backfill, so changes are actually empty
+ changes: [],
+ schemas: {
+ // the only addition in this model version: taking the new field into account for the schemas
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+      { unknowns: 'ignore' } // note the `unknowns: 'ignore'` option, which is how we're evicting the unknown fields
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ )
+ },
+};
+----
+
+The full type definition after the addition of the new model version:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 1: {
+ changes: [],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ )
+ },
+ },
+ 2: {
+ changes: [],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ )
+ },
+ },
+ },
+ mappings: {
+ properties: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ },
+ },
+};
+----
+
+==== Adding an indexed field without default value
+
+This scenario is fairly close to the previous one. The difference is
+that working with an indexed field means adding a `mappings_addition`
+change and also updating the root mappings accordingly.
+
+To reuse the previous example, let’s say the `dolly` field we want to
+add would need to be indexed instead.
+
+In that case, the new version needs to do the following:
+
+* add a `mappings_addition` type change to define the new mappings
+* update the root `mappings` accordingly
+* add the updated schemas as we did for the previous example
+
+The new version definition would look like:
+
+[source,ts]
+----
+let modelVersion2: SavedObjectsModelVersion = {
+ // add a change defining the mapping for the new field
+ changes: [
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ dolly: { type: 'text' },
+ },
+ },
+ ],
+ schemas: {
+ // adding the new field to the forwardCompatibility schema
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ )
+ },
+};
+----
+
+As mentioned, we will also need to update the root mappings definition:
+
+[source,ts]
+----
+mappings: {
+ properties: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ dolly: { type: 'text' },
+ },
+},
+----
+
+The full type definition after the addition of model version 2 would
+be:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 1: {
+ changes: [
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ },
+ },
+ ],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ )
+ },
+ },
+ 2: {
+ changes: [
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ dolly: { type: 'text' },
+ },
+ },
+ ],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ )
+ },
+ },
+ },
+ mappings: {
+ properties: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ dolly: { type: 'text' },
+ },
+ },
+};
+----
+
+==== Adding an indexed field with a default value
+
+Now, a slightly different scenario: we’d like to populate the newly
+introduced field with a default value.
+
+In that case, we’d need to add an additional `data_backfill` change to
+populate the new field’s value (in addition to the `mappings_addition`
+one):
+
+[source,ts]
+----
+let modelVersion2: SavedObjectsModelVersion = {
+ changes: [
+ // setting the `dolly` field's default value.
+ {
+ type: 'data_backfill',
+ transform: (document) => {
+ return { attributes: { dolly: 'default_value' } };
+ },
+ },
+ // define the mappings for the new field
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ dolly: { type: 'text' },
+ },
+ },
+ ],
+ schemas: {
+    // define `dolly` as a known field in the schema
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ )
+ },
+};
+----
+
+The full type definition would look like:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 1: {
+ changes: [
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ },
+ },
+ ],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string() },
+ )
+ },
+ },
+ 2: {
+ changes: [
+ {
+ type: 'data_backfill',
+ transform: (document) => {
+ return { attributes: { dolly: 'default_value' } };
+ },
+ },
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ dolly: { type: 'text' },
+ },
+ },
+ ],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { foo: schema.string(), bar: schema.string(), dolly: schema.string() },
+ )
+ },
+ },
+ },
+ mappings: {
+ properties: {
+ foo: { type: 'text' },
+ bar: { type: 'text' },
+ dolly: { type: 'text' },
+ },
+ },
+};
+----
+
+*Note:* _if the field were non-indexed, we would simply not use the
+`mappings_addition` change or update the mappings (as done in the first
+example)._
+
+==== Removing an existing field
+
+We are currently in model version 1, and our type has 2 indexed fields
+defined: `kept` and `removed`.
+
+The definition of the type at version 1 would look like:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ // initial (and current) model version
+ 1: {
+ changes: [],
+ schemas: {
+ // FC schema defining the known fields (indexed or not) for this version
+ forwardCompatibility: schema.object(
+ { kept: schema.string(), removed: schema.string() },
+          { unknowns: 'ignore' } // note the `unknowns: 'ignore'` option, which is how we're evicting the unknown fields
+ ),
+ // schema that will be used to validate input during `create` and `bulkCreate`
+ create: schema.object(
+ { kept: schema.string(), removed: schema.string() },
+ )
+ },
+ },
+ },
+ mappings: {
+ properties: {
+ kept: { type: 'text' },
+ removed: { type: 'text' },
+ },
+ },
+};
+----
+
+From here, say we want to remove the `removed` field, as our application
+doesn’t need it anymore since a recent change.
+
+The first thing to understand here is the impact on backward
+compatibility: say that Kibana version `X` was still using this field,
+and that we stopped utilizing the field in version `X+1`.
+
+We can’t remove the data in version `X+1`, as we need to be able to
+roll back to the prior version at *any time*. If we were to delete the
+data of this `removed` field during the upgrade to version `X+1`, and if
+we then, for any reason, needed to roll back to version `X`, it would
+cause data loss, as version `X` was still using this field, but it
+would no longer be present in our documents after the rollback.
+
+This is why we need to perform any field removal as a two-step operation:
+
+* release `X`: Kibana still utilizes the field
+* release `X+1`: Kibana no longer utilizes the field, but the data is still present in the documents
+* release `X+2`: the data is effectively deleted from the documents
+
+That way, any prior-version rollback (`X+2` to `X+1` *or* `X+1` to `X`)
+is safe in terms of data integrity.
+
+The main question, then, is what’s the best way to have our application
+layer simply ignore this `removed` field during version `X+1`. We don’t
+want this now-unused field to be returned from the persistence layer, as
+it could "pollute" the higher layers where the field is effectively no
+longer used or even known.
+
+This can easily be done by introducing a new version and using the
+`forwardCompatibility` schema to "shadow" the field.
+
+The `X+1` model version would look like:
+
+[source,ts]
+----
+// the new model version ignoring the `removed` field
+let modelVersion2: SavedObjectsModelVersion = {
+ changes: [],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { kept: schema.string() }, // `removed` is no longer defined here
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { kept: schema.string() }, // `removed` is no longer defined here
+ )
+ },
+};
+----
+
+The full type definition after the addition of the new model version:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ // initial (and current) model version
+ 1: {
+ changes: [],
+ schemas: {
+ // FC schema defining the known fields (indexed or not) for this version
+ forwardCompatibility: schema.object(
+ { kept: schema.string(), removed: schema.string() },
+          { unknowns: 'ignore' } // note the `unknowns: 'ignore'` option, which is how we're evicting the unknown fields
+ ),
+ // schema that will be used to validate input during `create` and `bulkCreate`
+ create: schema.object(
+ { kept: schema.string(), removed: schema.string() },
+ )
+ },
+ },
+ 2: {
+ changes: [],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { kept: schema.string() }, // `removed` is no longer defined here
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { kept: schema.string() }, // `removed` is no longer defined here
+ )
+ },
+ }
+ },
+ mappings: {
+ properties: {
+ kept: { type: 'text' },
+ removed: { type: 'text' },
+ },
+ },
+};
+----
+
+Then, in a *later* release, we can deploy the change that will
+effectively remove the data from the documents:
+
+[source,ts]
+----
+// the new model version removing the data of the `removed` field
+let modelVersion3: SavedObjectsModelVersion = {
+ changes: [ // define a data_removal change to delete the field
+ {
+ type: 'data_removal',
+ removedAttributePaths: ['removed']
+ }
+ ],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { kept: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { kept: schema.string() },
+ )
+ },
+};
+----
+
+The full type definition after the data removal would look like:
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ namespaceType: 'single',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ // initial (and current) model version
+ 1: {
+ changes: [],
+ schemas: {
+ // FC schema defining the known fields (indexed or not) for this version
+ forwardCompatibility: schema.object(
+ { kept: schema.string(), removed: schema.string() },
+          { unknowns: 'ignore' } // note the `unknowns: 'ignore'` option, which is how we're evicting the unknown fields
+ ),
+ // schema that will be used to validate input during `create` and `bulkCreate`
+ create: schema.object(
+ { kept: schema.string(), removed: schema.string() },
+ )
+ },
+ },
+ 2: {
+ changes: [],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { kept: schema.string() }, // `removed` is no longer defined here
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { kept: schema.string() }, // `removed` is no longer defined here
+ )
+ },
+ },
+ 3: {
+ changes: [ // define a data_removal change to delete the field
+ {
+ type: 'data_removal',
+ removedAttributePaths: ['removed']
+ }
+ ],
+ schemas: {
+ forwardCompatibility: schema.object(
+ { kept: schema.string() },
+ { unknowns: 'ignore' }
+ ),
+ create: schema.object(
+ { kept: schema.string() },
+ )
+ },
+ }
+ },
+ mappings: {
+ properties: {
+ kept: { type: 'text' },
+ removed: { type: 'text' },
+ },
+ },
+};
+----
+
+=== Testing model versions
+
+Model version definitions are more structured than the legacy migration
+functions, which makes them harder to test without the proper tooling.
+This is why a set of testing tools and utilities is exposed from the
+`@kbn/core-test-helpers-model-versions` package, to help properly test
+the logic associated with model versions and their associated
+transformations.
+
+==== Tooling for unit tests
+
+For unit tests, the package exposes utilities to easily test the impact
+of transforming documents from one model version to another, either
+upward or backward.
+
+===== Model version test migrator
+
+The `createModelVersionTestMigrator` helper creates a test migrator that
+can be used to test model version changes between versions, by
+transforming documents the same way the migration algorithm would during
+an upgrade.
+
+*Example:*
+
+[source,ts]
+----
+import {
+ createModelVersionTestMigrator,
+ type ModelVersionTestMigrator
+} from '@kbn/core-test-helpers-model-versions';
+
+const mySoTypeDefinition = someSoType();
+
+describe('mySoTypeDefinition model version transformations', () => {
+ let migrator: ModelVersionTestMigrator;
+
+ beforeEach(() => {
+ migrator = createModelVersionTestMigrator({ type: mySoTypeDefinition });
+ });
+
+ describe('Model version 2', () => {
+    it('properly backfills the expected fields when converting from v1 to v2', () => {
+ const obj = createSomeSavedObject();
+
+ const migrated = migrator.migrate({
+ document: obj,
+ fromVersion: 1,
+ toVersion: 2,
+ });
+
+ expect(migrated.properties).toEqual(expectedV2Properties);
+ });
+
+ it('properly removes the expected fields when converting from v2 to v1', () => {
+ const obj = createSomeSavedObject();
+
+ const migrated = migrator.migrate({
+ document: obj,
+ fromVersion: 2,
+ toVersion: 1,
+ });
+
+ expect(migrated.properties).toEqual(expectedV1Properties);
+ });
+ });
+});
+----
+
+==== Tooling for integration tests
+
+During integration tests, we can boot a real Elasticsearch cluster,
+allowing us to manipulate SO documents in a way close to how it would be
+done at production runtime. We can even simulate the cohabitation of two
+Kibana instances with different model versions to assert the behavior of
+their interactions.
+
+===== Model version test bed
+
+The package exposes a `createModelVersionTestBed` function that can be
+used to fully set up a test bed for model version integration testing. It
+can be used to start and stop the ES server, and to initiate the
+migration between the two versions we’re testing.
+
+*Example:*
+
+[source,ts]
+----
+import {
+ createModelVersionTestBed,
+ type ModelVersionTestKit
+} from '@kbn/core-test-helpers-model-versions';
+
+describe('myIntegrationTest', () => {
+ const testbed = createModelVersionTestBed();
+ let testkit: ModelVersionTestKit;
+
+ beforeAll(async () => {
+ await testbed.startES();
+ });
+
+ afterAll(async () => {
+ await testbed.stopES();
+ });
+
+ beforeEach(async () => {
+    // prepare the testkit, creating the index and performing the SO migration
+ testkit = await testbed.prepareTestKit({
+ savedObjectDefinitions: [{
+ definition: mySoTypeDefinition,
+ // the model version that will be used for the "before" version
+ modelVersionBefore: 1,
+ // the model version that will be used for the "after" version
+ modelVersionAfter: 2,
+ }]
+ })
+ });
+
+ afterEach(async () => {
+    if (testkit) {
+      // delete the indices between each test so the migration runs again
+ await testkit.tearsDown();
+ }
+ });
+
+ it('can be used to test model version cohabitation', async () => {
+ // last registered version is `1` (modelVersionBefore)
+ const repositoryV1 = testkit.repositoryBefore;
+ // last registered version is `2` (modelVersionAfter)
+ const repositoryV2 = testkit.repositoryAfter;
+
+    // do something with the two repositories, e.g.
+    await repositoryV1.create('my-type', someAttrs, { id });
+ const v2docReadFromV1 = await repositoryV2.get('my-type', id);
+ expect(v2docReadFromV1.attributes).toEqual(whatIExpect);
+ });
+});
+----
+
+*Limitations:*
+
+Because the test bed only creates the parts of Core required to
+instantiate the two SO repositories, and because we’re not able to
+properly load all plugins (for proper isolation), the integration test
+bed currently has some limitations:
+
+* no extensions are enabled
+** no security
+** no encryption
+** no spaces
+* all SO types will be using the same SO index
+
+=== Limitations and edge cases in serverless environments
+
+In serverless environments, upgrades are performed in a way where, at
+some point, the old and the new version of the application live in
+cohabitation. This leads to some particularities in the way the SO APIs
+work, and to some limitations and edge cases that we need to document.
+
+==== Using the `fields` option of the `find` savedObjects API
+
+By default, the `find` API (like any other SO API returning documents)
+will migrate all documents before returning them, to ensure that
+documents can be used by both versions during a cohabitation (e.g. an old
+node searching for documents already migrated, or a new node searching
+for documents not yet migrated).
+
+However, when using the `fields` option of the `find` API, the documents
+can’t be migrated, as some model version changes can’t be applied
+against a partial set of attributes. For this reason, when the `fields`
+option is provided, the documents returned from `find` will *not* be
+migrated.
+
+This is why, when using this option, the API consumer needs to make
+sure that _all_ the fields passed to the `fields` option *were already
+present in the prior model version*. Otherwise, it may lead to
+inconsistencies during upgrades, where newly introduced or backfilled
+fields may not necessarily appear in the documents returned from the
+`search` API when the option is used.
+
+(_note_: both the previous and the next version of Kibana must then
+follow this rule)
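+
+_A minimal sketch of a safe usage, assuming a hypothetical `so-type` saved object whose `title` attribute already existed in the prior model version, and an already-scoped `savedObjectsClient`:_
+
+[source,ts]
+----
+// Safe: `title` was already present in the prior model version, so even
+// partial (non-migrated) documents returned by `find` will contain it.
+const { saved_objects: results } = await savedObjectsClient.find({
+  type: 'so-type',
+  fields: ['title'],
+});
+
+// Risky: a field introduced or backfilled only in the latest model version
+// (e.g. a hypothetical `description`) may be missing from documents that
+// have not been migrated yet, so it should not be requested this way.
+----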
+
+==== Using `bulkUpdate` for fields with large `json` blobs
+
+The savedObjects `bulkUpdate` API will update documents client-side and
+then reindex the updated documents. These update operations are done
+in-memory, and cause memory constraint issues when updating many objects
+with large `json` blobs stored in some fields. As such, we recommend
+against using `bulkUpdate` for savedObjects that:
+
+* use arrays (as these tend to be large objects)
+* store large `json` blobs in some fields
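+
+_A minimal sketch of an alternative, assuming a hypothetical `so-type` with a large `panelsJSON` attribute and a pre-computed `objectsToUpdate` list; updating sequentially keeps a single large document in memory at a time:_
+
+[source,ts]
+----
+// Instead of a single `bulkUpdate` call holding every large document in
+// memory, update the objects one at a time with the regular `update` API.
+for (const { id, attributes } of objectsToUpdate) {
+  await savedObjectsClient.update('so-type', id, {
+    panelsJSON: attributes.panelsJSON, // large json blob, one object at a time
+  });
+}
+----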
+
diff --git a/docs/developer/architecture/core/saved-objects-service.asciidoc b/docs/developer/architecture/core/saved-objects-service.asciidoc
index eb9f1c7fd4516..71ece4ae3d735 100644
--- a/docs/developer/architecture/core/saved-objects-service.asciidoc
+++ b/docs/developer/architecture/core/saved-objects-service.asciidoc
@@ -47,6 +47,11 @@ export const dashboardVisualization: SavedObjectsType = {
name: 'dashboard_visualization', // <1>
hidden: true,
namespaceType: 'multiple-isolated', // <2>
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 1: modelVersion1,
+ 2: modelVersion2,
+ },
mappings: {
dynamic: false,
properties: {
@@ -58,10 +63,7 @@ export const dashboardVisualization: SavedObjectsType = {
},
},
},
- migrations: {
- '1.0.0': migratedashboardVisualizationToV1,
- '2.0.0': migratedashboardVisualizationToV2,
- },
+ // ...other mandatory properties
};
----
<1> Since the name of a Saved Object type may form part of the URL path for the
@@ -95,33 +97,32 @@ Each Saved Object type can define it's own {es} field mappings.
Because multiple Saved Object types can share the same index, mappings defined
by a type will be nested under a top-level field that matches the type name.
-For example, the mappings defined by the `dashboard_visualization` Saved
+For example, the mappings defined by the `search` Saved
Object type:
-.src/plugins/my_plugin/server/saved_objects/dashboard_visualization.ts
+https://github.com/elastic/kibana/blob/main/src/plugins/saved_search/server/saved_objects/search.ts#L19-L70[.src/plugins/saved_search/server/saved_objects/search.ts]
[source,typescript]
----
import { SavedObjectsType } from 'src/core/server';
-
-export const dashboardVisualization: SavedObjectsType = {
- name: 'dashboard_visualization',
- ...
+// ... other imports
+export const getSavedSearchObjectType: SavedObjectsType = { // <1>
+ name: 'search',
+ hidden: false,
+ namespaceType: 'multiple-isolated',
mappings: {
+ dynamic: false,
properties: {
- dynamic: false,
- description: {
- type: 'text',
- },
- hits: {
- type: 'integer',
- },
+ title: { type: 'text' },
+ description: { type: 'text' },
},
},
- migrations: { ... },
+ modelVersions: { ... },
+ // ...other optional properties
};
----
+<1> Simplification
-Will result in the following mappings being applied to the `.kibana` index:
+Will result in the following mappings being applied to the `.kibana_analytics` index:
[source,json]
----
{
@@ -129,14 +130,14 @@ Will result in the following mappings being applied to the `.kibana` index:
"dynamic": "strict",
"properties": {
...
- "dashboard_vizualization": {
+ "search": {
"dynamic": false,
"properties": {
- "description": {
+ "title": {
"type": "text",
},
- "hits": {
- "type": "integer",
+ "description": {
+ "type": "text",
},
},
}
@@ -157,242 +158,336 @@ Saved Object types should never use `dynamic: true` as this can cause an
arbitrary amount of fields to be added to the `.kibana` index.
[[saved-objects-service-writing-migrations]]
-==== Writing Migrations
+==== Writing migrations by defining model versions
-Saved Objects support schema changes between Kibana versions, which we call
-migrations. Migrations are applied when a Kibana installation is upgraded from
-one version to the next, when exports are imported via the Saved Objects
-Management UI, or when a new object is created via the HTTP API.
+Saved Objects support changes using `modelVersions`. The modelVersion API is a new way to define transformations
+(_"migrations"_) for your savedObject types, and will replace the
+"legacy" migration API after Kibana version `8.10.0`. The legacy migration API has been deprecated, meaning it is no longer possible to register migrations using the legacy system.
-Each Saved Object type may define migrations for its schema. Migrations are
-specified by the Kibana version number, receive an input document, and must
-return the fully migrated document to be persisted to Elasticsearch.
+Model versions are decoupled from the stack version and satisfy the requirements for zero downtime and backward compatibility.
-Let's say we want to define two migrations:
-- In version 1.1.0, we want to drop the `subtitle` field and append it to the
- title
-- In version 1.4.0, we want to add a new `id` field to every panel with a newly
- generated UUID.
+Each Saved Object type may define model versions for its schema; model versions are bound to a given https://github.com/elastic/kibana/blob/9b330e493216e8dde3166451e4714966f63f5ab7/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts#L22-L27[savedObject type]. Changes to a saved object type are
+specified by defining a new model version.
-First, the current `mappings` should always reflect the latest or "target"
-schema. Next, we should define a migration function for each step in the schema
-evolution:
+=== Defining model versions
-src/plugins/my_plugin/server/saved_objects/dashboard_visualization.ts
-[source,typescript]
+As with the old migrations, model versions are bound to a given
+https://github.com/elastic/kibana/blob/9b330e493216e8dde3166451e4714966f63f5ab7/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts#L22-L27[savedObject
+type].
+
+When registering a SO type, a new
+https://github.com/elastic/kibana/blob/9a6a2ccdff619f827b31c40dd9ed30cb27203da7/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts#L138-L177[modelVersions]
+property is available. This attribute is a map of
+https://github.com/elastic/kibana/blob/9a6a2ccdff619f827b31c40dd9ed30cb27203da7/packages/core/saved-objects/core-saved-objects-server/src/model_version/model_version.ts#L12-L20[SavedObjectsModelVersion]
+which is the top-level type/container to define model versions.
+
+This map follows a format similar to the old migration map,
+`{ [version number] => version definition }`; however, a given SO type’s
+model version is now identified by a single integer.
+
+The first version must be numbered as version 1, incrementing by one for
+each new version.
+
+That way:
+
+* SO type versions are decoupled from stack versioning
+* SO type versions are independent between types
+
+_a *valid* version numbering:_
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 1: modelVersion1, // valid: start with version 1
+ 2: modelVersion2, // valid: no gap between versions
+ },
+ // ...other mandatory properties
+};
----
-import { SavedObjectsType, SavedObjectMigrationFn } from 'src/core/server';
-import uuid from 'uuid';
-interface DashboardVisualizationPre110 {
- title: string;
- subtitle: string;
- panels: Array<{}>;
-}
-interface DashboardVisualization110 {
- title: string;
- panels: Array<{}>;
-}
+_an *invalid* version numbering:_
-interface DashboardVisualization140 {
- title: string;
- panels: Array<{ id: string }>;
-}
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 2: modelVersion2, // invalid: first version must be 1
+ 4: modelVersion3, // invalid: skipped version 3
+ },
+ // ...other mandatory properties
+};
+----
-const migrateDashboardVisualization110: SavedObjectMigrationFn<
- DashboardVisualizationPre110, // <1>
- DashboardVisualization110
-> = (doc) => {
- const { subtitle, ...attributesWithoutSubtitle } = doc.attributes;
- return {
- ...doc, // <2>
- attributes: {
- ...attributesWithoutSubtitle,
- title: `${doc.attributes.title} - ${doc.attributes.subtitle}`,
+=== Structure of a model version
+
+https://github.com/elastic/kibana/blob/9b330e493216e8dde3166451e4714966f63f5ab7/packages/core/saved-objects/core-saved-objects-server/src/model_version/model_version.ts#L12-L20[Model
+versions] are not just functions as the previous migrations were, but
+structured objects describing how the version behaves and what changed
+since the last one.
+
+_A base example of what a model version can look like:_
+
+[source,ts]
+----
+const myType: SavedObjectsType = {
+ name: 'test',
+ switchToModelVersionAt: '8.10.0',
+ modelVersions: {
+ 1: {
+ changes: [
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ someNewField: { type: 'text' },
+ },
+ },
+ {
+ type: 'data_backfill',
+ transform: someBackfillFunction,
+ },
+ ],
+ schemas: {
+ forwardCompatibility: fcSchema,
+ create: createSchema,
+ },
},
- };
+ },
+ // ...other mandatory properties
};
+----
+
+*Note:* Having multiple changes of the same type for a given version is
+supported by design to allow merging different sources (to prepare for
+an eventual higher-level API).
-const migrateDashboardVisualization140: SavedObjectMigrationFn<
- DashboardVisualization110,
- DashboardVisualization140
-> = (doc) => {
- const outPanels = doc.attributes.panels?.map((panel) => {
- return { ...panel, id: uuid.v4() };
- });
- return {
- ...doc,
- attributes: {
- ...doc.attributes,
- panels: outPanels,
+_This definition would be perfectly valid:_
+
+[source,ts]
+----
+const version1: SavedObjectsModelVersion = {
+ changes: [
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ someNewField: { type: 'text' },
+ },
+ },
+ {
+ type: 'mappings_addition',
+ addedMappings: {
+ anotherNewField: { type: 'text' },
+ },
},
- };
+ ],
};
+----
-export const dashboardVisualization: SavedObjectsType = {
- name: 'dashboard_visualization', // <1>
- /** ... */
- migrations: {
- // Takes a pre 1.1.0 doc, and converts it to 1.1.0
- '1.1.0': migrateDashboardVisualization110,
+It’s currently composed of two main properties:
+
+==== changes
+
+https://github.com/elastic/kibana/blob/9b330e493216e8dde3166451e4714966f63f5ab7/packages/core/saved-objects/core-saved-objects-server/src/model_version/model_version.ts#L21-L51[link
+to the TS doc for `changes`]
+
+Describes the list of changes applied during this version.
+
+*Important:* This is the part that replaces the old migration system. It
+allows defining when a version adds new mappings, mutates documents, or
+performs other type-related changes.
+
+The current types of changes are:
+
+===== mappings_addition
+
+Used to define new mappings introduced in a given version.
- // Takes a 1.1.0 doc, and converts it to 1.4.0
- '1.4.0': migrateDashboardVisualization140, // <3>
+_Usage example:_
+
+[source,ts]
+----
+const change: SavedObjectsModelMappingsAdditionChange = {
+ type: 'mappings_addition',
+ addedMappings: {
+ newField: { type: 'text' },
+ existingNestedField: {
+ properties: {
+ newNestedProp: { type: 'keyword' },
+ },
+ },
},
};
----
-<1> It is useful to define an interface for each version of the schema. This
-allows TypeScript to ensure that you are properly handling the input and output
-types correctly as the schema evolves.
-<2> Returning a shallow copy is necessary to avoid type errors when using
-different types for the input and output shape.
-<3> Migrations do not have to be defined for every version. The version number
-of a migration must always be the earliest Kibana version in which this
-migration was released. So if you are creating a migration which will be
-part of the v7.10.0 release, but will also be backported and released as
-v7.9.3, the migration version should be: 7.9.3.
-Migrations should be written defensively, an exception in a migration function
-will prevent a Kibana upgrade from succeeding and will cause downtime for our
-users. Having said that, if a document is encountered that is not in the
-expected shape, migrations are encouraged to throw an exception to abort the
-upgrade. In most scenarios, it is better to fail an upgrade than to silently
-ignore a corrupt document which can cause unexpected behaviour at some future
-point in time.
+*note:* _When adding mappings, the root `type.mappings` must also be
+updated accordingly (as it was done previously)._
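+
+_A minimal sketch of the corresponding root `mappings` update, reusing the illustrative field names from the example above:_
+
+[source,ts]
+----
+mappings: {
+  properties: {
+    newField: { type: 'text' },
+    existingNestedField: {
+      properties: {
+        newNestedProp: { type: 'keyword' },
+      },
+    },
+  },
+},
+----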
-WARNING: Do not attempt to change the `migrationVersion`, `id`, or `type` fields
-within a migration function, this is not supported.
+===== mappings_deprecation
-It is critical that you have extensive tests to ensure that migrations behave
-as expected with all possible input documents. Given how simple it is to test
-all the branch conditions in a migration function and the high impact of a bug
-in this code, there's really no reason not to aim for 100% test code coverage.
+Used to flag mappings as no longer being used and ready to be removed.
-==== Type visibility
-It is recommended that plugins only expose Saved Object types that are necessary.
-That is so to provide better backward compatibility.
+_Usage example:_
-There are two options to register a type: either as completely unexposed to the global Saved Objects HTTP APIs and client or to only expose it to the client but not to the APIs.
-
-===== Hidden types
+[source,ts]
+----
+let change: SavedObjectsModelMappingsDeprecationChange = {
+ type: 'mappings_deprecation',
+ deprecatedMappings: ['someDeprecatedField', 'someNested.deprecatedField'],
+};
+----
-In case when the type is not hidden, it will be exposed via the global Saved Objects HTTP API.
-That brings the limitation of introducing backward incompatible changes as there might be a service consuming the API.
+*note:* _It is currently not possible to remove fields from an existing
+index’s mapping (without reindexing into another index), so the mappings
+flagged with this change type won’t be deleted for now, but this should
+still be used to allow our system to clean the mappings once upstream
+(ES) unblocks us._
-This is a formal limitation not prohibiting backward incompatible changes in the migrations.
-But in case of such changes on the hidden types, they can be resolved and encapsulated on the intermediate layer in the plugin API.
+===== data_backfill
-Hence, the main idea is that all the interactions with the Saved Objects should be done via the plugin API rather than via the Saved Objects HTTP API.
+Used to populate fields (indexed or not) added in the same version.
-By default, the hidden types will not be accessible in the Saved Objects client.
-To make them accessible, they should be explicitly listed in the `includedHiddenTypes` parameters upon client creation.
+_Usage example:_
-[source,typescript]
+[source,ts]
+----
+let change: SavedObjectsModelDataBackfillChange = {
+ type: 'data_backfill',
+ transform: (document) => {
+ return { attributes: { someAddedField: 'defaultValue' } };
+ },
+};
----
-import { CoreStart, Plugin } from '@kbn/core/server';
-class SomePlugin implements Plugin {
- // ...
+*note:* _Even if no check is performed to ensure it, this type of model
+change should only be used to backfill newly introduced fields._
- public start({ savedObjects }: CoreStart) {
- // ...
+===== data_removal
- const savedObjectsClient = savedObjects.getScopedClient(
- request,
- { includedHiddenTypes: ['dashboard_visualization'] }
- );
+Used to remove data (unset fields) from all documents of the type.
- // ...
- }
-}
+_Usage example:_
+
+[source,ts]
+----
+let change: SavedObjectsModelDataRemovalChange = {
+ type: 'data_removal',
+  removedAttributePaths: ['someRootAttributes', 'some.nested.attribute'],
+};
----
-===== Hidden from the HTTP APIs
+*note:* _Due to backward compatibility concerns, field utilization must
+be stopped in a prior release before the actual data removal (in case of
+rollback). Please refer to the field removal migration example below in
+this document._
-When a saved object is registered as hidden from the HTTP APIs, it will remain exposed to the global Saved Objects client:
+===== unsafe_transform
-[source,typescript]
-----
-import { SavedObjectsType } from 'src/core/server';
+Used to execute an arbitrary transformation function.
-export const myCustomVisualization: SavedObjectsType = {
- name: 'my_custom_visualization', // <1>
- hidden: false,
- hiddenFromHttpApis: true, // <2>
- namespaceType: 'multiple-isolated',
- mappings: {
- dynamic: false,
- properties: {
- description: {
- type: 'text',
- },
- hits: {
- type: 'integer',
- },
- },
- },
- migrations: {
- '1.0.0': migrateMyCustomVisualizationToV1,
- '2.0.0': migrateMyCustomVisualizationToV2,
+_Usage example:_
+
+[source,ts]
+----
+let change: SavedObjectsModelUnsafeTransformChange = {
+ type: 'unsafe_transform',
+ transformFn: (document) => {
+ document.attributes.someAddedField = 'defaultValue';
+ return { document };
},
};
----
-<1> MyCustomVisualization types have their own domain-specific HTTP API's that leverage the global Saved Objects client
-<2> This field determines "hidden from http apis" behavior -- any attempts to use the global Saved Objects HTTP APIs will throw errors
+*note:* _Using such transformations is potentially unsafe, given that the
+migration system has no knowledge of which kind of operations will
+effectively be executed against the documents. Those should only be used
+when there’s no other way to cover one’s migration needs._ *Please reach
+out to the development team if you think you need to use this, as you
+theoretically shouldn’t.*
+
+==== schemas
+
+https://github.com/elastic/kibana/blob/9b330e493216e8dde3166451e4714966f63f5ab7/packages/core/saved-objects/core-saved-objects-server/src/model_version/schemas.ts#L11-L16[link
+to the TS doc for `schemas`]
+
+The schemas associated with this version. Schemas are used to validate
+or convert SO documents at various stages of their lifecycle.
-=== Client side usage
+The currently available schemas are:
-==== References
+===== forwardCompatibility
-When a Saved Object declares `references` to other Saved Objects, the
-Saved Objects Export API will automatically export the target object with all
-of its references. This makes it easy for users to export the entire
-reference graph of an object.
+This is a new concept introduced by model versions. This schema is used
+for inter-version compatibility.
-If a Saved Object can't be used on its own, that is, it needs other objects
-to exist for a feature to function correctly, that Saved Object should declare
-references to all the objects it requires. For example, a `dashboard`
-object might have panels for several `visualization` objects. When these
-`visualization` objects don't exist, the dashboard cannot be rendered
-correctly. The `dashboard` object should declare references to all its
-visualizations.
+When retrieving a savedObject document from an index, if the version of
+the document is higher than the latest version known to the Kibana
+instance, the document will go through the `forwardCompatibility` schema
+of the associated model version.
-However, `visualization` objects can continue to be rendered or embedded into
-other dashboards even if the `dashboard` it was originally embedded into
-doesn't exist. As a result, `visualization` objects should not declare
-references to `dashboard` objects.
+*Important:* This conversion mechanism shouldn’t assert the data
+itself; it should only strip unknown fields to convert the document to
+the *shape* of the document at the given version.
-For each referenced object, an `id`, `type` and `name` are added to the
-`references` array:
+Basically, this schema should keep all the known fields of a given
+version, and remove all the unknown fields, without throwing.
-[source, typescript]
+The forward compatibility schema can be implemented in two different ways.
+
+[arabic]
+. Using `config-schema`
+
+_Example of schema for a version having two fields: someField and
+anotherField_
+
+[source,ts]
----
-router.get(
- { path: '/some-path', validate: false },
- async (context, req, res) => {
- const object = await context.core.savedObjects.client.create(
- 'dashboard',
- {
- title: 'my dashboard',
- panels: [
- { visualization: 'vis1' }, // <1>
- ],
- indexPattern: 'indexPattern1'
- },
- { references: [
- { id: '...', type: 'visualization', name: 'vis1' },
- { id: '...', type: 'index_pattern', name: 'indexPattern1' },
- ]
- }
- )
- ...
- }
+const versionSchema = schema.object(
+ {
+ someField: schema.maybe(schema.string()),
+ anotherField: schema.maybe(schema.string()),
+ },
+ { unknowns: 'ignore' }
);
----
-<1> Note how `dashboard.panels[0].visualization` stores the `name` property of
-the reference (not the `id` directly) to be able to uniquely identify this
-reference. This guarantees that the id the reference points to always remains
-up to date. If a visualization `id` was directly stored in
-`dashboard.panels[0].visualization` there is a risk that this `id` gets
-updated without updating the reference in the references array.
+
+*Important:* Note the `{ unknowns: 'ignore' }` in the schema’s options.
+This is required when using `config-schema` based schemas, as this is
+what will evict the additional fields without throwing an error.
+
+[arabic, start=2]
+. Using a plain javascript function
+
+_Example of schema for a version having two fields: someField and
+anotherField_
+
+[source,ts]
+----
+import { pick } from 'lodash';
+
+const versionSchema: SavedObjectModelVersionEvictionFn = (attributes) => {
+  const knownFields = ['someField', 'anotherField'];
+  // keep only the fields known at this version, dropping any others
+  return pick(attributes, knownFields);
+};
+----
+
+*note:* _Even if highly recommended, implementing this schema is not
+strictly required. Type owners can manage unknown fields and
+inter-version compatibility themselves in their service layer instead._
+
+===== create
+
+This is a direct replacement for
+https://github.com/elastic/kibana/blob/9b330e493216e8dde3166451e4714966f63f5ab7/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts#L75-L82[the
+old SavedObjectType.schemas] definition, now directly included in the
+model version definition.
+
+As a refresher, the `create` schema is a `@kbn/config-schema` object-type
+schema, and is used to validate the properties of the document during
+`create` and `bulkCreate` operations.
+
+*note:* _Implementing this schema is optional, but still recommended, as
+otherwise there will be no validation when importing objects._
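+
+_A minimal sketch of such a `create` schema, with illustrative field names:_
+
+[source,ts]
+----
+import { schema } from '@kbn/config-schema';
+
+// validates the attributes passed to `create` and `bulkCreate`
+const createSchema = schema.object({
+  someField: schema.string(),
+  anotherField: schema.maybe(schema.string()),
+});
+----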
+
+For implementation examples, refer to <<saved-objects-service-use-case-examples,the use-case examples>>.
+
+include::saved-objects-service-use-case-examples.asciidoc[leveloffset=+1]
diff --git a/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts b/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts
index 716b31406649c..53b52d04f376b 100644
--- a/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts
+++ b/packages/core/saved-objects/core-saved-objects-server/src/saved_objects_type.ts
@@ -70,6 +70,7 @@ export interface SavedObjectsType {
mappings: SavedObjectsTypeMappingDefinition;
/**
* An optional map of {@link SavedObjectMigrationFn | migrations} or a function returning a map of {@link SavedObjectMigrationFn | migrations} to be used to migrate the type.
+ * @deprecated Use {@link SavedObjectsType.modelVersions | modelVersions} instead.
*/
migrations?: SavedObjectMigrationMap | (() => SavedObjectMigrationMap);
/**
@@ -78,6 +79,7 @@ export interface SavedObjectsType {
* When provided, calls to {@link SavedObjectsClient.create | create} will be validated against this schema.
*
* See {@link SavedObjectsValidationMap} for more details.
+ * @deprecated Use {@link SavedObjectsType.modelVersions | modelVersions} instead.
*/
schemas?: SavedObjectsValidationMap | (() => SavedObjectsValidationMap);
/**
@@ -177,7 +179,7 @@ export interface SavedObjectsType {
modelVersions?: SavedObjectsModelVersionMap | SavedObjectsModelVersionMapProvider;
/**
- * Allows to opt-in to the new model version API.
+   * Allows opting in to the model version API.
*
* Must be a valid semver version (with the patch version being necessarily 0)
*