From 500bd1e53248ff043893df52b3570a206cd534f8 Mon Sep 17 00:00:00 2001 From: Andrew Watkins Date: Tue, 4 Aug 2020 09:46:00 -0700 Subject: [PATCH 01/13] perf: event cells when TimeSeries renders (#19197) * perf: new render id when dashboard refreshes * perf: record vis time properly for scrolled cells * fix: correct props --- .../dashboards/components/DashboardPage.tsx | 15 ++++++++++++ ui/src/shared/components/RefreshingView.tsx | 1 + ui/src/shared/components/TimeSeries.tsx | 21 ++++++++++++++-- ui/src/shared/components/cells/Cell.tsx | 24 ++----------------- 4 files changed, 37 insertions(+), 24 deletions(-) diff --git a/ui/src/dashboards/components/DashboardPage.tsx b/ui/src/dashboards/components/DashboardPage.tsx index 87547500dc0..258e96d9331 100644 --- a/ui/src/dashboards/components/DashboardPage.tsx +++ b/ui/src/dashboards/components/DashboardPage.tsx @@ -66,6 +66,21 @@ class DashboardPage extends Component { } } + public componentDidUpdate(prevProps) { + const {setRenderID, dashboard, manualRefresh} = this.props + + if (prevProps.manualRefresh !== manualRefresh) { + const renderID = uuid.v4() + setRenderID('dashboard', renderID) + const tags = { + dashboardID: dashboard.id, + } + const fields = {renderID} + + event('Dashboard Mounted', tags, fields) + } + } + public componentWillUnmount() { if (isFlagEnabled('queryCacheForDashboards')) { resetQueryCache() diff --git a/ui/src/shared/components/RefreshingView.tsx b/ui/src/shared/components/RefreshingView.tsx index d1dfadab07d..acbb12b756f 100644 --- a/ui/src/shared/components/RefreshingView.tsx +++ b/ui/src/shared/components/RefreshingView.tsx @@ -71,6 +71,7 @@ class RefreshingView extends PureComponent { return ( { private pendingCheckStatuses: CancelBox = null public componentDidMount() { + const {cellID, setCellMount} = this.props this.observer = new IntersectionObserver(entries => { entries.forEach(entry => { const {isIntersecting} = entry - if (!this.isIntersecting && isIntersecting && this.pendingReload) { + const reload = + !this.isIntersecting && isIntersecting && this.pendingReload + + if (reload) { this.reload() } + if (reload && cellID) { + setCellMount(cellID, new Date().getTime()) + } + this.isIntersecting = isIntersecting }) }) @@ -141,9 +151,15 @@ class TimeSeries extends Component { } public componentDidUpdate(prevProps: Props) { - if (this.shouldReload(prevProps) && this.isIntersecting) { + const {setCellMount, cellID} = this.props + const reload = this.shouldReload(prevProps) && this.isIntersecting + if (reload) { this.reload() } + + if (reload && cellID) { + setCellMount(cellID, new Date().getTime()) + } } public componentWillUnmount() { @@ -383,6 +399,7 @@ const mstp = (state: AppState, props: OwnProps) => { const mdtp = { notify: notifyAction, onGetCachedResultsThunk: getCachedResultsThunk, + setCellMount: setCellMountAction, } const connector = connect(mstp, mdtp) diff --git a/ui/src/shared/components/cells/Cell.tsx b/ui/src/shared/components/cells/Cell.tsx index dbe57e12586..0a4daf45df0 100644 --- a/ui/src/shared/components/cells/Cell.tsx +++ b/ui/src/shared/components/cells/Cell.tsx @@ -11,9 +11,6 @@ import RefreshingView from 'src/shared/components/RefreshingView' import {ErrorHandling} from 'src/shared/decorators/errors' import EmptyGraphMessage from 'src/shared/components/EmptyGraphMessage' -// Action -import {setCellMount as setCellMountAction} from 'src/perf/actions' - // Utils import {getByID} from 'src/resources/selectors' @@ -33,20 +30,10 @@ interface State { inView: boolean } -interface DispatchProps { 
- setCellMount: typeof setCellMountAction -} - -type Props = StateProps & OwnProps & DispatchProps +type Props = StateProps & OwnProps @ErrorHandling class CellComponent extends Component { - componentDidMount() { - const {cell, setCellMount} = this.props - - setCellMount(cell.id, new Date().getTime()) - } - public render() { const {cell, view} = this.props @@ -131,11 +118,4 @@ const mstp = (state: AppState, ownProps: OwnProps) => { return {view} } -const mdtp = { - setCellMount: setCellMountAction, -} - -export default connect( - mstp, - mdtp -)(CellComponent) +export default connect(mstp)(CellComponent) From 8a3abfbe8557fa655c253d9dd5fca5d4aa89dfc7 Mon Sep 17 00:00:00 2001 From: Ariel Salem Date: Tue, 4 Aug 2020 11:19:15 -0700 Subject: [PATCH 02/13] chore(clone-cell-test): added an e2e test for cloning cells (#19207) --- ui/cypress/e2e/dashboardsView.test.ts | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/ui/cypress/e2e/dashboardsView.test.ts b/ui/cypress/e2e/dashboardsView.test.ts index 61110ce54f1..d1dd88efb12 100644 --- a/ui/cypress/e2e/dashboardsView.test.ts +++ b/ui/cypress/e2e/dashboardsView.test.ts @@ -41,7 +41,7 @@ describe('Dashboard', () => { cy.getByTestID('dashboard-card').should('contain', newName) }) - it('can create and destroy cells & toggle in and out of presentation mode', () => { + it('can create, clone and destroy cells & toggle in and out of presentation mode', () => { cy.get('@org').then(({id: orgID}: Organization) => { cy.createDashboard(orgID).then(({body}) => { cy.fixture('routes').then(({orgs}) => { @@ -150,12 +150,23 @@ describe('Dashboard', () => { // Remove Note cell cy.getByTestID('cell-context--toggle') - .first() + .last() .click() cy.getByTestID('cell-context--delete').click() cy.getByTestID('cell-context--delete-confirm').click() - // Remove View cell + // Clone View cell + cy.getByTestID('cell-context--toggle').click() + cy.getByTestID('cell-context--clone').click() + + // Ensure that the clone exists + cy.getByTestID('cell Line Graph (Clone)').should('exist') + // Remove View cells + cy.getByTestID('cell-context--toggle') + .first() + .click() + cy.getByTestID('cell-context--delete').click() + cy.getByTestID('cell-context--delete-confirm').click() cy.getByTestID('cell-context--toggle').click() cy.getByTestID('cell-context--delete').click() cy.getByTestID('cell-context--delete-confirm').click() @@ -338,7 +349,7 @@ describe('Dashboard', () => { `?lower=now%28%29%20-%201h&vars%5BbucketsCSV%5D=${defaultBucket}` ) - // open CEO + // open VEO cy.getByTestID('cell-context--toggle').click() cy.getByTestID('cell-context--configure').click() @@ -396,7 +407,7 @@ describe('Dashboard', () => { .pipe(getSelectedVariable(dashboard.id, 2)) .should('equal', 'v2') - // open CEO + // open VEO cy.getByTestID('cell-context--toggle').click() cy.getByTestID('cell-context--configure').click() cy.getByTestID('toolbar-tab').should('be.visible') From a61161d73b5904ada12929dc40da3be8af76d314 Mon Sep 17 00:00:00 2001 From: rbose22 <54558279+rbose22@users.noreply.github.com> Date: Tue, 4 Aug 2020 16:24:58 -0700 Subject: [PATCH 03/13] feat: added backend for mosaic graph type (#19195) Closes: #19121 Closes: #19211 Added the ability to save the mosaic graph type. 
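For reviewers, a minimal sketch of the intended round-trip through the
new view-properties (un)marshal helpers added here; the field values
below are illustrative only and not part of this patch:

    props := influxdb.MosaicViewProperties{
        Type:           influxdb.ViewPropertyTypeMosaic,
        XColumn:        "_time",
        YSeriesColumns: []string{"_value"},
        ViewColors:     []string{"#8F8AF4"},
    }
    // MarshalViewPropertiesJSON wraps the properties as
    // {"shape": "chronograf-v2", "type": "mosaic", ...}.
    b, _ := influxdb.MarshalViewPropertiesJSON(props)
    // UnmarshalViewPropertiesJSON dispatches on type == "mosaic"
    // and hands back a MosaicViewProperties value.
    v, _ := influxdb.UnmarshalViewPropertiesJSON(b)
    if _, ok := v.(influxdb.MosaicViewProperties); !ok {
        panic("unexpected view properties type")
    }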
Co-authored-by: Rose Parker reparker837@gmail.com --- dashboard.go | 39 +++++++++++++++ http/swagger.yml | 76 +++++++++++++++++++++++++++++ pkger/clone_resource.go | 15 ++++++ pkger/parser.go | 1 + pkger/parser_models.go | 27 +++++++++- pkger/parser_test.go | 44 ++++++++++++++++- pkger/service_test.go | 25 ++++++++++ pkger/testdata/dashboard_mosaic.yml | 42 ++++++++++++++++ 8 files changed, 266 insertions(+), 3 deletions(-) create mode 100644 pkger/testdata/dashboard_mosaic.yml diff --git a/dashboard.go b/dashboard.go index 9f111774e13..bfdec50d384 100644 --- a/dashboard.go +++ b/dashboard.go @@ -375,6 +375,7 @@ const ( ViewPropertyTypeSingleStatPlusLine = "line-plus-single-stat" ViewPropertyTypeTable = "table" ViewPropertyTypeXY = "xy" + ViewPropertyTypeMosaic = "mosaic" ) // ViewProperties is used to mark other structures as conforming to a View. @@ -484,6 +485,12 @@ func UnmarshalViewPropertiesJSON(b []byte) (ViewProperties, error) { return nil, err } vis = sv + case ViewPropertyTypeMosaic: + var mv MosaicViewProperties + if err := json.Unmarshal(v.B, &mv); err != nil { + return nil, err + } + vis = mv } case "empty": var ev EmptyViewProperties @@ -574,6 +581,15 @@ func MarshalViewPropertiesJSON(v ViewProperties) ([]byte, error) { ScatterViewProperties: vis, } + case MosaicViewProperties: + s = struct { + Shape string `json:"shape"` + MosaicViewProperties + }{ + Shape: "chronograf-v2", + + MosaicViewProperties: vis, + } case MarkdownViewProperties: s = struct { Shape string `json:"shape"` @@ -790,6 +806,27 @@ type ScatterViewProperties struct { TimeFormat string `json:"timeFormat"` } +// MosaicViewProperties represents options for mosaic view in Chronograf +type MosaicViewProperties struct { + Type string `json:"type"` + Queries []DashboardQuery `json:"queries"` + ViewColors []string `json:"colors"` + FillColumns []string `json:"fillColumns"` + XColumn string `json:"xColumn"` + YSeriesColumns []string `json:"ySeriesColumns"` + XDomain []float64 `json:"xDomain,omitempty"` + YDomain []float64 `json:"yDomain,omitempty"` + XAxisLabel string `json:"xAxisLabel"` + YAxisLabel string `json:"yAxisLabel"` + XPrefix string `json:"xPrefix"` + XSuffix string `json:"xSuffix"` + YPrefix string `json:"yPrefix"` + YSuffix string `json:"ySuffix"` + Note string `json:"note"` + ShowNoteWhenEmpty bool `json:"showNoteWhenEmpty"` + TimeFormat string `json:"timeFormat"` +} + // GaugeViewProperties represents options for gauge view in Chronograf type GaugeViewProperties struct { Type string `json:"type"` @@ -848,6 +885,7 @@ func (SingleStatViewProperties) viewProperties() {} func (HistogramViewProperties) viewProperties() {} func (HeatmapViewProperties) viewProperties() {} func (ScatterViewProperties) viewProperties() {} +func (MosaicViewProperties) viewProperties() {} func (GaugeViewProperties) viewProperties() {} func (TableViewProperties) viewProperties() {} func (MarkdownViewProperties) viewProperties() {} @@ -860,6 +898,7 @@ func (v SingleStatViewProperties) GetType() string { return v.Type } func (v HistogramViewProperties) GetType() string { return v.Type } func (v HeatmapViewProperties) GetType() string { return v.Type } func (v ScatterViewProperties) GetType() string { return v.Type } +func (v MosaicViewProperties) GetType() string { return v.Type } func (v GaugeViewProperties) GetType() string { return v.Type } func (v TableViewProperties) GetType() string { return v.Type } func (v MarkdownViewProperties) GetType() string { return v.Type } diff --git a/http/swagger.yml b/http/swagger.yml index 
6d569dcc0bf..7fcb88b8390 100644 --- a/http/swagger.yml +++ b/http/swagger.yml @@ -9009,6 +9009,81 @@ components: type: string decimalPlaces: $ref: "#/components/schemas/DecimalPlaces" + MosaicViewProperties: + type: object + required: + - type + - queries + - colors + - shape + - note + - showNoteWhenEmpty + - xColumn + - ySeriesColumns + - fillColumns + - xDomain + - yDomain + - xAxisLabel + - yAxisLabel + - xPrefix + - yPrefix + - xSuffix + - ySuffix + properties: + timeFormat: + type: string + type: + type: string + enum: [mosaic] + queries: + type: array + items: + $ref: "#/components/schemas/DashboardQuery" + colors: + description: Colors define color encoding of data into a visualization + type: array + items: + type: string + shape: + type: string + enum: ["chronograf-v2"] + note: + type: string + showNoteWhenEmpty: + description: If true, will display note when empty + type: boolean + xColumn: + type: string + ySeriesColumns: + type: array + items: + type: string + fillColumns: + type: array + items: + type: string + xDomain: + type: array + items: + type: number + maxItems: 2 + yDomain: + type: array + items: + type: number + maxItems: 2 + xAxisLabel: + type: string + yAxisLabel: + type: string + xPrefix: + type: string + xSuffix: + type: string + yPrefix: + type: string + ySuffix: + type: string ScatterViewProperties: type: object required: @@ -9591,6 +9666,7 @@ components: - $ref: "#/components/schemas/CheckViewProperties" - $ref: "#/components/schemas/ScatterViewProperties" - $ref: "#/components/schemas/HeatmapViewProperties" + - $ref: "#/components/schemas/MosaicViewProperties" View: required: - name diff --git a/pkger/clone_resource.go b/pkger/clone_resource.go index fcc4e71b1d6..4a7d8b14a28 100644 --- a/pkger/clone_resource.go +++ b/pkger/clone_resource.go @@ -641,6 +641,18 @@ func convertCellView(cell influxdb.Cell) chart { setNoteFixes(p.Note, p.ShowNoteWhenEmpty, p.Prefix, p.Suffix) ch.TickPrefix = p.TickPrefix ch.TickSuffix = p.TickSuffix + case influxdb.MosaicViewProperties: + ch.Kind = chartKindMosaic + ch.Queries = convertQueries(p.Queries) + ch.Colors = stringsToColors(p.ViewColors) + ch.XCol = p.XColumn + ch.YSeriesColumns = p.YSeriesColumns + ch.Axes = []axis{ + {Label: p.XAxisLabel, Prefix: p.XPrefix, Suffix: p.XSuffix, Name: "x", Domain: p.XDomain}, + {Label: p.YAxisLabel, Prefix: p.YPrefix, Suffix: p.YSuffix, Name: "y", Domain: p.YDomain}, + } + ch.Note = p.Note + ch.NoteOnEmpty = p.ShowNoteWhenEmpty case influxdb.ScatterViewProperties: ch.Kind = chartKindScatter ch.Queries = convertQueries(p.Queries) @@ -705,6 +717,9 @@ func convertChartToResource(ch chart) Resource { if len(ch.Axes) > 0 { r[fieldChartAxes] = ch.Axes } + if len(ch.YSeriesColumns) > 0 { + r[fieldChartYSeriesColumns] = ch.YSeriesColumns + } if ch.EnforceDecimals { r[fieldChartDecimalPlaces] = ch.DecimalPlaces } diff --git a/pkger/parser.go b/pkger/parser.go index cea04daa57d..40dec4d3df0 100644 --- a/pkger/parser.go +++ b/pkger/parser.go @@ -1415,6 +1415,7 @@ func parseChart(r Resource) (chart, []validationErr) { XPos: r.intShort(fieldChartXPos), YPos: r.intShort(fieldChartYPos), FillColumns: r.slcStr(fieldChartFillColumns), + YSeriesColumns: r.slcStr(fieldChartYSeriesColumns), } if presLeg, ok := r[fieldChartLegend].(legend); ok { diff --git a/pkger/parser_models.go b/pkger/parser_models.go index 1bba6b18a0e..cc32adb423c 100644 --- a/pkger/parser_models.go +++ b/pkger/parser_models.go @@ -422,6 +422,7 @@ const ( chartKindHeatMap chartKind = "heatmap" chartKindHistogram chartKind = "histogram" 
chartKindMarkdown chartKind = "markdown" + chartKindMosaic chartKind = "mosaic" chartKindScatter chartKind = "scatter" chartKindSingleStat chartKind = "single_stat" chartKindSingleStatPlusLine chartKind = "single_stat_plus_line" @@ -432,8 +433,9 @@ const ( func (c chartKind) ok() bool { switch c { case chartKindGauge, chartKindHeatMap, chartKindHistogram, - chartKindMarkdown, chartKindScatter, chartKindSingleStat, - chartKindSingleStatPlusLine, chartKindTable, chartKindXY: + chartKindMarkdown, chartKindMosaic, chartKindScatter, + chartKindSingleStat, chartKindSingleStatPlusLine, chartKindTable, + chartKindXY: return true default: return false @@ -526,6 +528,7 @@ const ( fieldChartTickPrefix = "tickPrefix" fieldChartTickSuffix = "tickSuffix" fieldChartTimeFormat = "timeFormat" + fieldChartYSeriesColumns = "ySeriesColumns" fieldChartWidth = "width" fieldChartXCol = "xCol" fieldChartXPos = "xPos" @@ -551,6 +554,7 @@ type chart struct { Queries queries Axes axes Geom string + YSeriesColumns []string XCol, YCol string XPos, YPos int Height, Width int @@ -620,6 +624,25 @@ func (c chart) properties() influxdb.ViewProperties { Type: influxdb.ViewPropertyTypeMarkdown, Note: c.Note, } + case chartKindMosaic: + return influxdb.MosaicViewProperties{ + Type: influxdb.ViewPropertyTypeMosaic, + Queries: c.Queries.influxDashQueries(), + ViewColors: c.Colors.strings(), + XColumn: c.XCol, + YSeriesColumns: c.YSeriesColumns, + XDomain: c.Axes.get("x").Domain, + YDomain: c.Axes.get("y").Domain, + XPrefix: c.Axes.get("x").Prefix, + YPrefix: c.Axes.get("y").Prefix, + XSuffix: c.Axes.get("x").Suffix, + YSuffix: c.Axes.get("y").Suffix, + XAxisLabel: c.Axes.get("x").Label, + YAxisLabel: c.Axes.get("y").Label, + Note: c.Note, + ShowNoteWhenEmpty: c.NoteOnEmpty, + TimeFormat: c.TimeFormat, + } case chartKindScatter: return influxdb.ScatterViewProperties{ Type: influxdb.ViewPropertyTypeScatter, diff --git a/pkger/parser_test.go b/pkger/parser_test.go index 339fcdcc492..aafe84e5004 100644 --- a/pkger/parser_test.go +++ b/pkger/parser_test.go @@ -1249,6 +1249,48 @@ spec: }) }) + t.Run("mosaic chart", func(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + testfileRunner(t, "testdata/dashboard_mosaic.yml", func(t *testing.T, template *Template) { + sum := template.Summary() + require.Len(t, sum.Dashboards, 1) + + actual := sum.Dashboards[0] + assert.Equal(t, KindDashboard, actual.Kind) + assert.Equal(t, "dash-0", actual.Name) + assert.Equal(t, "a dashboard w/ single mosaic chart", actual.Description) + + require.Len(t, actual.Charts, 1) + actualChart := actual.Charts[0] + assert.Equal(t, 3, actualChart.Height) + assert.Equal(t, 6, actualChart.Width) + assert.Equal(t, 1, actualChart.XPosition) + assert.Equal(t, 2, actualChart.YPosition) + + props, ok := actualChart.Properties.(influxdb.MosaicViewProperties) + require.True(t, ok) + assert.Equal(t, "mosaic note", props.Note) + assert.True(t, props.ShowNoteWhenEmpty) + + require.Len(t, props.Queries, 1) + q := props.Queries[0] + expectedQuery := `from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean")` + assert.Equal(t, expectedQuery, q.Text) + assert.Equal(t, "advanced", q.EditMode) + + assert.Equal(t, []string{"_value", "foo"}, props.YSeriesColumns) + assert.Equal(t, []float64{0, 10}, props.XDomain) + assert.Equal(t, []float64{0, 100}, props.YDomain) + assert.Equal(t, 
"x_label", props.XAxisLabel) + assert.Equal(t, "y_label", props.YAxisLabel) + assert.Equal(t, "x_prefix", props.XPrefix) + assert.Equal(t, "y_prefix", props.YPrefix) + assert.Equal(t, "x_suffix", props.XSuffix) + assert.Equal(t, "y_suffix", props.YSuffix) + }) + }) + }) + t.Run("scatter chart", func(t *testing.T) { t.Run("happy path", func(t *testing.T) { testfileRunner(t, "testdata/dashboard_scatter", func(t *testing.T, template *Template) { @@ -2058,7 +2100,7 @@ spec: colors: - name: laser type: min - hex: + hex: value: 3.0`, }, { diff --git a/pkger/service_test.go b/pkger/service_test.go index f620e5cf4d9..e2d38944cbd 100644 --- a/pkger/service_test.go +++ b/pkger/service_test.go @@ -2110,6 +2110,31 @@ func TestService(t *testing.T) { }, }, }, + { + name: "mosaic", + expectedView: influxdb.View{ + ViewContents: influxdb.ViewContents{ + Name: "view name", + }, + Properties: influxdb.MosaicViewProperties{ + Type: influxdb.ViewPropertyTypeMosaic, + Note: "a note", + Queries: []influxdb.DashboardQuery{newQuery()}, + ShowNoteWhenEmpty: true, + ViewColors: []string{"#8F8AF4", "#8F8AF4", "#8F8AF4"}, + XColumn: "x", + YSeriesColumns: []string{"y"}, + XDomain: []float64{0, 10}, + YDomain: []float64{0, 100}, + XAxisLabel: "x_label", + XPrefix: "x_prefix", + XSuffix: "x_suffix", + YAxisLabel: "y_label", + YPrefix: "y_prefix", + YSuffix: "y_suffix", + }, + }, + }, { name: "without new name single stat", expectedView: influxdb.View{ diff --git a/pkger/testdata/dashboard_mosaic.yml b/pkger/testdata/dashboard_mosaic.yml new file mode 100644 index 00000000000..01a3cc0144a --- /dev/null +++ b/pkger/testdata/dashboard_mosaic.yml @@ -0,0 +1,42 @@ +apiVersion: influxdata.com/v2alpha1 +kind: Dashboard +metadata: + name: dash-0 +spec: + description: a dashboard w/ single mosaic chart + charts: + - kind: Mosaic + name: mosaic chart + note: mosaic note + noteOnEmpty: true + prefix: sumtin + suffix: days + xPos: 1 + yPos: 2 + xCol: _time + yCol: _value + width: 6 + height: 3 + ySeriesColumns: ["_value", "foo"] + queries: + - query: > + from(bucket: v.bucket) |> range(start: v.timeRangeStart) |> filter(fn: (r) => r._measurement == "mem") |> filter(fn: (r) => r._field == "used_percent") |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false) |> yield(name: "mean") + colors: + - hex: "#8F8AF4" + - hex: "#F4CF31" + - hex: "#FFFFFF" + axes: + - name : "x" + label: x_label + prefix: x_prefix + suffix: x_suffix + domain: + - 0 + - 10 + - name: "y" + label: y_label + prefix: y_prefix + suffix: y_suffix + domain: + - 0 + - 100 From 41cb12aeec0c8db2a3c7a226f478058a5bddbbf6 Mon Sep 17 00:00:00 2001 From: Johnny Steenbergen Date: Thu, 30 Jul 2020 11:26:17 -0700 Subject: [PATCH 04/13] feat(pkger): add parameterization to dashboard queries references: #18237 --- cmd/influxd/launcher/pkger_test.go | 294 ++++++++++++++++++++++++++-- http/swagger.yml | 7 +- pkger/clone_resource.go | 10 +- pkger/models.go | 3 +- pkger/models_test.go | 2 +- pkger/parser.go | 146 +++++++++++++- pkger/parser_models.go | 247 +++++++++++++++++++---- pkger/parser_test.go | 88 +++++++++ pkger/service.go | 2 +- pkger/service_test.go | 5 +- pkger/testdata/dashboard_params.yml | 49 +++++ 11 files changed, 787 insertions(+), 66 deletions(-) create mode 100644 pkger/testdata/dashboard_params.yml diff --git a/cmd/influxd/launcher/pkger_test.go b/cmd/influxd/launcher/pkger_test.go index 29520f6250f..722ce13f74f 100644 --- a/cmd/influxd/launcher/pkger_test.go +++ b/cmd/influxd/launcher/pkger_test.go @@ -2305,7 +2305,7 @@ func 
TestLauncher_Pkger(t *testing.T) { }) }) - t.Run("errors incurred during application of package rolls back to state before package", func(t *testing.T) { + t.Run("errors incurred during application of template rolls back to state before template", func(t *testing.T) { stacks, err := svc.ListStacks(ctx, l.Org.ID, pkger.ListFilter{}) require.NoError(t, err) require.Empty(t, stacks) @@ -2560,24 +2560,21 @@ spec: t.Run("dashboards", func(t *testing.T) { newQuery := func() influxdb.DashboardQuery { - q := influxdb.DashboardQuery{ + return influxdb.DashboardQuery{ BuilderConfig: influxdb.BuilderConfig{ Buckets: []string{}, - Tags: nil, + Tags: []struct { + Key string `json:"key"` + Values []string `json:"values"` + AggregateFunctionType string `json:"aggregateFunctionType"` + }{}, Functions: []struct { Name string `json:"name"` }{}, - AggregateWindow: struct { - Period string `json:"period"` - FillValues bool `json:"fillValues"` - }{}, }, Text: "from(v.bucket) |> count()", EditMode: "advanced", } - // TODO: remove this when issue that forced the builder tag to be here to render in UI. - q.BuilderConfig.Tags = append(q.BuilderConfig.Tags, influxdb.NewBuilderTag("_measurement", "filter", "")) - return q } newAxes := func() map[string]influxdb.Axis { @@ -3236,7 +3233,7 @@ spec: }) t.Run("pkg with same bkt-var-label does nto create new resources for them", func(t *testing.T) { - // validate the new package doesn't create new resources for bkts/labels/vars + // validate the new template doesn't create new resources for bkts/labels/vars // since names collide. impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkger.ApplyWithTemplate(newCompletePkg(t))) require.NoError(t, err) @@ -3435,7 +3432,7 @@ spec: }, varArgs.Values) }) - t.Run("error incurs during package application when resources already exist rollsback to prev state", func(t *testing.T) { + t.Run("error incurs during template application when resources already exist rollsback to prev state", func(t *testing.T) { updatePkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(updatePkgYMLStr)) require.NoError(t, err) @@ -3589,7 +3586,7 @@ spec: assert.Equal(t, influxdb.ID(impact.Summary.Buckets[0].ID), ev.Resources[0].ID) }) - t.Run("apply a package with env refs", func(t *testing.T) { + t.Run("apply a template with env refs", func(t *testing.T) { pkgStr := fmt.Sprintf(` apiVersion: %[1]s kind: Bucket @@ -3763,6 +3760,277 @@ spec: assert.Equal(t, "var_threeve", sum.Variables[0].Name) assert.Empty(t, sum.MissingEnvs) }) + + t.Run("apply a template with query refs", func(t *testing.T) { + dashName := "dash-1" + newDashTmpl := func(t *testing.T) *pkger.Template { + t.Helper() + + tmplStr := ` +apiVersion: influxdata.com/v2alpha1 +kind: Dashboard +metadata: + name: %s +spec: + charts: + - kind: Single_Stat + name: single stat + xPos: 1 + yPos: 2 + width: 6 + height: 3 + queries: + - query: | + option params = { + bucket: "foo", + start: -1d, + stop: now(), + name: "max", + floatVal: 1.0, + minVal: 10 + } + + from(bucket: params.bucket) + |> range(start: params.start, end: params.stop) + |> filter(fn: (r) => r._measurement == "processes") + |> filter(fn: (r) => r.floater == params.floatVal) + |> filter(fn: (r) => r._value > params.minVal) + |> aggregateWindow(every: v.windowPeriod, fn: max) + |> yield(name: params.name) + params: + - key: bucket + default: "bar" + type: string + - key: start + type: duration + - key: stop + type: time + - key: floatVal + default: 37.2 + type: float + - key: minVal + type: int + - key: name # infer type + 
colors: + - name: laser + type: text + hex: "#8F8AF4" + value: 3` + tmplStr = fmt.Sprintf(tmplStr, dashName) + + template, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(tmplStr)) + require.NoError(t, err) + return template + } + + isExpectedQuery := func(t *testing.T, actual pkger.SummaryDashboard, expectedParams string) { + t.Helper() + + require.Len(t, actual.Charts, 1) + + props, ok := actual.Charts[0].Properties.(influxdb.SingleStatViewProperties) + require.True(t, ok, "unexpected chart properties") + + require.Len(t, props.Queries, 1) + + expectedQuery := expectedParams + ` + +from(bucket: params.bucket) + |> range(start: params.start, end: params.stop) + |> filter(fn: (r) => + (r._measurement == "processes")) + |> filter(fn: (r) => + (r.floater == params.floatVal)) + |> filter(fn: (r) => + (r._value > params.minVal)) + |> aggregateWindow(every: v.windowPeriod, fn: max) + |> yield(name: params.name)` + + assert.Equal(t, expectedQuery, props.Queries[0].Text) + assert.Equal(t, "advanced", props.Queries[0].EditMode) + } + + envKey := func(paramKey string) string { + return fmt.Sprintf( + "dashboards[%s].spec.charts[0].queries[0].params.%s", + dashName, + paramKey, + ) + } + + t.Run("using default values", func(t *testing.T) { + stack, cleanup := newStackFn(t, pkger.StackCreate{}) + defer cleanup() + + impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, + pkger.ApplyWithStackID(stack.ID), + pkger.ApplyWithTemplate(newDashTmpl(t)), + ) + require.NoError(t, err) + + require.Len(t, impact.Summary.Dashboards, 1) + + actual := impact.Summary.Dashboards[0] + + expectedParams := `option params = { + bucket: "bar", + start: -24h0m0s, + stop: now(), + name: "max", + floatVal: 37.2, + minVal: 10, +}` + isExpectedQuery(t, actual, expectedParams) + + require.Len(t, actual.EnvReferences, 6) + + expectedRefs := []pkger.SummaryReference{ + { + Field: "spec.charts[0].queries[0].params.bucket", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.bucket`, + ValType: "string", + DefaultValue: "bar", + }, + { + Field: "spec.charts[0].queries[0].params.floatVal", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.floatVal`, + ValType: "float", + DefaultValue: 37.2, + }, + } + assert.Equal(t, expectedRefs, actual.EnvReferences[:len(expectedRefs)]) + + // check necessary since json can flip int to float type and fail assertions + // in a flakey manner + expectedIntRef := pkger.SummaryReference{ + Field: "spec.charts[0].queries[0].params.minVal", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.minVal`, + ValType: "integer", + DefaultValue: int64(10), + } + actualIntRef := actual.EnvReferences[len(expectedRefs)] + if f, ok := actualIntRef.DefaultValue.(float64); ok { + actualIntRef.DefaultValue = int64(f) + } + assert.Equal(t, expectedIntRef, actualIntRef) + + expectedRefs = []pkger.SummaryReference{ + { + Field: "spec.charts[0].queries[0].params.name", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.name`, + ValType: "string", + DefaultValue: "max", + }, + { + Field: "spec.charts[0].queries[0].params.start", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.start`, + ValType: "duration", + DefaultValue: "-24h0m0s", + }, + { + Field: "spec.charts[0].queries[0].params.stop", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.stop`, + ValType: "time", + DefaultValue: "now()", + }, + } + assert.Equal(t, expectedRefs, actual.EnvReferences[3:]) + }) + + t.Run("with user provided values", func(t *testing.T) { + 
stack, cleanup := newStackFn(t, pkger.StackCreate{}) + defer cleanup() + + impact, err := svc.Apply(ctx, l.Org.ID, l.User.ID, + pkger.ApplyWithStackID(stack.ID), + pkger.ApplyWithTemplate(newDashTmpl(t)), + pkger.ApplyWithEnvRefs(map[string]interface{}{ + envKey("bucket"): "foobar", + envKey("name"): "min", + envKey("start"): "-5d", + envKey("floatVal"): 33.3, + envKey("minVal"): 3, + }), + ) + require.NoError(t, err) + + require.Len(t, impact.Summary.Dashboards, 1) + + actual := impact.Summary.Dashboards[0] + + expectedParams := `option params = { + bucket: "foobar", + start: -5d, + stop: now(), + name: "min", + floatVal: 33.3, + minVal: 3, +}` + isExpectedQuery(t, actual, expectedParams) + + require.Len(t, actual.EnvReferences, 6) + + expectedRefs := []pkger.SummaryReference{ + { + Field: "spec.charts[0].queries[0].params.bucket", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.bucket`, + ValType: "string", + Value: "foobar", + DefaultValue: "bar", + }, + { + Field: "spec.charts[0].queries[0].params.floatVal", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.floatVal`, + ValType: "float", + Value: 33.3, + DefaultValue: 37.2, + }, + } + assert.Equal(t, expectedRefs, actual.EnvReferences[:len(expectedRefs)]) + + // check necessary since json can flip int to float type and fail assertions + // in a flakey manner + expectedIntRef := pkger.SummaryReference{ + Field: "spec.charts[0].queries[0].params.minVal", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.minVal`, + ValType: "integer", + Value: int64(3), + DefaultValue: int64(10), + } + actualIntRef := actual.EnvReferences[len(expectedRefs)] + if f, ok := actualIntRef.DefaultValue.(float64); ok { + actualIntRef.DefaultValue = int64(f) + } + if f, ok := actualIntRef.Value.(float64); ok { + actualIntRef.Value = int64(f) + } + assert.Equal(t, expectedIntRef, actualIntRef) + + expectedRefs = []pkger.SummaryReference{ + { + Field: "spec.charts[0].queries[0].params.name", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.name`, + ValType: "string", + Value: "min", + DefaultValue: "max", + }, + { + Field: "spec.charts[0].queries[0].params.start", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.start`, + ValType: "duration", + Value: "-5d", + DefaultValue: "-24h0m0s", + }, + { + Field: "spec.charts[0].queries[0].params.stop", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.stop`, + ValType: "time", + DefaultValue: "now()", + }, + } + assert.Equal(t, expectedRefs, actual.EnvReferences[3:]) + }) + }) } func newCompletePkg(t *testing.T) *pkger.Template { diff --git a/http/swagger.yml b/http/swagger.yml index 7fcb88b8390..440db472987 100644 --- a/http/swagger.yml +++ b/http/swagger.yml @@ -7531,8 +7531,13 @@ components: type: string description: Key identified as environment reference and is the key identified in the template value: - type: string description: Value provided to fulfill reference + nullable: true + oneOf: + - type: string + - type: integer + - type: number + - type: boolean defaultValue: description: Default value that will be provided for the reference when no value is provided nullable: true diff --git a/pkger/clone_resource.go b/pkger/clone_resource.go index 4a7d8b14a28..66941a4efb1 100644 --- a/pkger/clone_resource.go +++ b/pkger/clone_resource.go @@ -708,8 +708,14 @@ func convertChartToResource(ch chart) Resource { fieldChartHeight: ch.Height, fieldChartWidth: ch.Width, } - if len(ch.Queries) > 0 { - r[fieldChartQueries] = 
ch.Queries + var qq []Resource + for _, q := range ch.Queries { + qq = append(qq, Resource{ + fieldQuery: q.DashboardQuery(), + }) + } + if len(qq) > 0 { + r[fieldChartQueries] = qq } if len(ch.Colors) > 0 { r[fieldChartColors] = ch.Colors diff --git a/pkger/models.go b/pkger/models.go index dc7501142b7..33fe38f8135 100644 --- a/pkger/models.go +++ b/pkger/models.go @@ -650,7 +650,8 @@ type SummaryLabelMapping struct { type SummaryReference struct { Field string `json:"resourceField"` EnvRefKey string `json:"envRefKey"` - Value string `json:"value"` + ValType string `json:"valueType"` + Value interface{} `json:"value"` DefaultValue interface{} `json:"defaultValue"` } diff --git a/pkger/models_test.go b/pkger/models_test.go index 1ed0f945a48..04c486324f2 100644 --- a/pkger/models_test.go +++ b/pkger/models_test.go @@ -10,7 +10,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestPkg(t *testing.T) { +func TestTemplate(t *testing.T) { t.Run("Summary", func(t *testing.T) { t.Run("buckets returned in asc order by name", func(t *testing.T) { pkg := Template{ diff --git a/pkger/parser.go b/pkger/parser.go index 40dec4d3df0..c889672a606 100644 --- a/pkger/parser.go +++ b/pkger/parser.go @@ -16,6 +16,9 @@ import ( "strings" "time" + "github.com/influxdata/flux/ast" + "github.com/influxdata/flux/ast/edit" + "github.com/influxdata/flux/parser" "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/pkg/jsonnet" "gopkg.in/yaml.v3" @@ -948,7 +951,7 @@ func (p *Template) graphDashboards() *parseErr { sort.Sort(dash.labels) for i, cr := range o.Spec.slcResource(fieldDashCharts) { - ch, fails := parseChart(cr) + ch, fails := p.parseChart(dash.MetaName(), i, cr) if fails != nil { failures = append(failures, objectValidationErr(fieldSpec, validationErr{ @@ -963,7 +966,7 @@ func (p *Template) graphDashboards() *parseErr { } p.mDashboards[dash.MetaName()] = dash - p.setRefs(dash.name, dash.displayName) + p.setRefs(dash.refs()...) return append(failures, dash.valid()...) 
}) @@ -1383,10 +1386,10 @@ func (p *Template) setRefs(refs ...*references) { } } -func parseChart(r Resource) (chart, []validationErr) { +func (p *Template) parseChart(dashMetaName string, chartIdx int, r Resource) (*chart, []validationErr) { ck, err := r.chartKind() if err != nil { - return chart{}, []validationErr{{ + return nil, []validationErr{{ Field: fieldKind, Msg: err.Error(), }} @@ -1436,11 +1439,14 @@ func parseChart(r Resource) (chart, []validationErr) { if presentQueries, ok := r[fieldChartQueries].(queries); ok { c.Queries = presentQueries } else { - for _, rq := range r.slcResource(fieldChartQueries) { - c.Queries = append(c.Queries, query{ - Query: strings.TrimSpace(rq.stringShort(fieldQuery)), + q, vErrs := p.parseChartQueries(dashMetaName, chartIdx, r.slcResource(fieldChartQueries)) + if len(vErrs) > 0 { + failures = append(failures, validationErr{ + Field: "queries", + Nested: vErrs, }) } + c.Queries = q } if presentColors, ok := r[fieldChartColors].(colors); ok { @@ -1505,10 +1511,132 @@ func parseChart(r Resource) (chart, []validationErr) { } if failures = append(failures, c.validProperties()...); len(failures) > 0 { - return chart{}, failures + return nil, failures } - return c, nil + return &c, nil +} + +func (p *Template) parseChartQueries(dashMetaName string, chartIdx int, resources []Resource) (queries, []validationErr) { + var ( + q queries + vErrs []validationErr + ) + for i, rq := range resources { + source := rq.stringShort(fieldQuery) + if source == "" { + continue + } + prefix := fmt.Sprintf("dashboards[%s].spec.charts[%d].queries[%d]", dashMetaName, chartIdx, i) + qq, err := p.parseQuery(prefix, source, rq.slcResource(fieldParams)) + if err != nil { + vErrs = append(vErrs, validationErr{ + Field: "query", + Index: intPtr(i), + Msg: err.Error(), + }) + } + q = append(q, qq) + } + return q, vErrs +} + +func (p *Template) parseQuery(prefix, source string, params []Resource) (query, error) { + files := parser.ParseSource(source).Files + if len(files) != 1 { + return query{}, influxErr(influxdb.EInvalid, "invalid query source") + } + + q := query{ + Query: strings.TrimSpace(source), + } + + opt, err := edit.GetOption(files[0], "params") + if err != nil { + return q, nil + } + obj, ok := opt.(*ast.ObjectExpression) + if !ok { + return q, nil + } + + mParams := make(map[string]*references) + for _, p := range obj.Properties { + sl, ok := p.Key.(*ast.Identifier) + if !ok { + continue + } + + mParams[sl.Name] = &references{ + EnvRef: sl.Name, + defaultVal: valFromExpr(p.Value), + valType: p.Value.Type(), + } + } + + for _, pr := range params { + field := pr.stringShort(fieldKey) + if field == "" { + continue + } + + if _, ok := mParams[field]; !ok { + mParams[field] = &references{EnvRef: field} + } + + if def, ok := pr[fieldDefault]; ok { + mParams[field].defaultVal = def + } + if valtype, ok := pr.string(fieldType); ok { + mParams[field].valType = valtype + } + } + + for _, ref := range mParams { + envRef := fmt.Sprintf("%s.params.%s", prefix, ref.EnvRef) + q.params = append(q.params, &references{ + EnvRef: envRef, + defaultVal: ref.defaultVal, + val: p.mEnvVals[envRef], + valType: ref.valType, + }) + } + return q, nil +} + +func valFromExpr(p ast.Expression) interface{} { + switch literal := p.(type) { + case *ast.CallExpression: + sl, ok := literal.Callee.(*ast.Identifier) + if ok && sl.Name == "now" { + return "now()" + } + return nil + case *ast.DateTimeLiteral: + return ast.DateTimeFromLiteral(literal) + case *ast.FloatLiteral: + return 
ast.FloatFromLiteral(literal) + case *ast.IntegerLiteral: + return ast.IntegerFromLiteral(literal) + case *ast.DurationLiteral: + dur, _ := ast.DurationFrom(literal, time.Time{}) + return dur + case *ast.StringLiteral: + return ast.StringFromLiteral(literal) + case *ast.UnaryExpression: + // a signed duration is represented by a UnaryExpression. + // it is the only unary expression allowed. + v := valFromExpr(literal.Argument) + if dur, ok := v.(time.Duration); ok { + switch literal.Operator { + case ast.SubtractionOperator: + return "-" + dur.String() + } + } + return v + default: + return nil + } } // dns1123LabelMaxLength is a label's max length in DNS (RFC 1123) diff --git a/pkger/parser_models.go b/pkger/parser_models.go index cc32adb423c..e31efeebc57 100644 --- a/pkger/parser_models.go +++ b/pkger/parser_models.go @@ -9,6 +9,9 @@ import ( "strings" "time" + "github.com/influxdata/flux/ast" + "github.com/influxdata/flux/ast/edit" + "github.com/influxdata/flux/parser" "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/notification" icheck "github.com/influxdata/influxdb/v2/notification/check" @@ -63,6 +66,7 @@ const ( fieldName = "name" fieldOffset = "offset" fieldOperator = "operator" + fieldParams = "params" fieldPrefix = "prefix" fieldQuery = "query" fieldSuffix = "suffix" @@ -457,7 +461,7 @@ type dashboard struct { identity Description string - Charts []chart + Charts []*chart labels sortedLabels } @@ -470,8 +474,16 @@ func (d *dashboard) ResourceType() influxdb.ResourceType { return KindDashboard.ResourceType() } +func (d *dashboard) refs() []*references { + var queryRefs []*references + for _, c := range d.Charts { + queryRefs = append(queryRefs, c.Queries.references()...) + } + return append([]*references{d.name, d.displayName}, queryRefs...) 
+} + func (d *dashboard) summarize() SummaryDashboard { - iDash := SummaryDashboard{ + sum := SummaryDashboard{ SummaryIdentifier: SummaryIdentifier{ Kind: KindDashboard, MetaName: d.MetaName(), @@ -481,16 +493,27 @@ func (d *dashboard) summarize() SummaryDashboard { Description: d.Description, LabelAssociations: toSummaryLabels(d.labels...), } - for _, c := range d.Charts { - iDash.Charts = append(iDash.Charts, SummaryChart{ + + for chartIdx, c := range d.Charts { + sum.Charts = append(sum.Charts, SummaryChart{ Properties: c.properties(), Height: c.Height, Width: c.Width, XPosition: c.XPos, YPosition: c.YPos, }) + for qIdx, q := range c.Queries { + for _, ref := range q.params { + parts := strings.Split(ref.EnvRef, ".") + field := fmt.Sprintf("spec.charts[%d].queries[%d].params.%s", chartIdx, qIdx, parts[len(parts)-1]) + sum.EnvReferences = append(sum.EnvReferences, convertRefToRefSummary(field, ref)) + sort.Slice(sum.EnvReferences, func(i, j int) bool { + return sum.EnvReferences[i].EnvRefKey < sum.EnvReferences[j].EnvRefKey + }) + } + } } - return iDash + return sum } func (d *dashboard) valid() []validationErr { @@ -567,7 +590,7 @@ type chart struct { TimeFormat string } -func (c chart) properties() influxdb.ViewProperties { +func (c *chart) properties() influxdb.ViewProperties { switch c.Kind { case chartKindGauge: return influxdb.GaugeViewProperties{ @@ -752,7 +775,7 @@ func (c chart) properties() influxdb.ViewProperties { } } -func (c chart) validProperties() []validationErr { +func (c *chart) validProperties() []validationErr { if c.Kind == chartKindMarkdown { // at the time of writing, there's nothing to validate for markdown types return nil @@ -804,6 +827,24 @@ func validPosition(pos string) []validationErr { return nil } +func (c *chart) validBaseProps() []validationErr { + var fails []validationErr + if c.Width <= 0 { + fails = append(fails, validationErr{ + Field: fieldChartWidth, + Msg: "must be greater than 0", + }) + } + + if c.Height <= 0 { + fails = append(fails, validationErr{ + Field: fieldChartHeight, + Msg: "must be greater than 0", + }) + } + return fails +} + var geometryTypes = map[string]bool{ "line": true, "step": true, @@ -827,24 +868,6 @@ func validGeometry(geom string) []validationErr { return nil } -func (c chart) validBaseProps() []validationErr { - var fails []validationErr - if c.Width <= 0 { - fails = append(fails, validationErr{ - Field: fieldChartWidth, - Msg: "must be greater than 0", - }) - } - - if c.Height <= 0 { - fails = append(fails, validationErr{ - Field: fieldChartHeight, - Msg: "must be greater than 0", - }) - } - return fails -} - const ( fieldChartFieldOptionDisplayName = "displayName" fieldChartFieldOptionFieldName = "fieldName" @@ -954,7 +977,7 @@ func (c colors) strings() []string { } // TODO: looks like much of these are actually getting defaults in -// the UI. looking at sytem charts, seeign lots of failures for missing +// the UI. looking at system charts, seeing lots of failures for missing // color types or no colors at all. 
func (c colors) hasTypes(types ...string) []validationErr { tMap := make(map[string]bool) @@ -997,7 +1020,40 @@ func (c colors) valid() []validationErr { } type query struct { - Query string `json:"query" yaml:"query"` + Query string `json:"query" yaml:"query"` + params []*references +} + +func (q query) DashboardQuery() string { + if len(q.params) == 0 { + return q.Query + } + + files := parser.ParseSource(q.Query).Files + if len(files) != 1 { + return q.Query + } + + opt, err := edit.GetOption(files[0], "params") + if err != nil { + // no params option present in query + return q.Query + } + + obj, ok := opt.(*ast.ObjectExpression) + if !ok { + // params option present is invalid. Should always be an Object. + return q.Query + } + + for _, ref := range q.params { + parts := strings.Split(ref.EnvRef, ".") + key := parts[len(parts)-1] + edit.SetProperty(obj, key, ref.expression()) + } + + edit.SetOption(files[0], "params", obj) + return ast.Format(files[0]) } type queries []query @@ -1005,17 +1061,22 @@ type queries []query func (q queries) influxDashQueries() []influxdb.DashboardQuery { var iQueries []influxdb.DashboardQuery for _, qq := range q { - newQuery := influxdb.DashboardQuery{ - Text: qq.Query, + iQueries = append(iQueries, influxdb.DashboardQuery{ + Text: qq.DashboardQuery(), EditMode: "advanced", - } - // TODO: axe this builder configs when issue https://github.com/influxdata/influxdb/issues/15708 is fixed up - newQuery.BuilderConfig.Tags = append(newQuery.BuilderConfig.Tags, influxdb.NewBuilderTag("_measurement", "filter", "")) - iQueries = append(iQueries, newQuery) + }) } return iQueries } +func (q queries) references() []*references { + var refs []*references + for _, qq := range q { + refs = append(refs, qq.params...) + } + return refs +} + const ( fieldAxisBase = "base" fieldAxisLabel = "label" @@ -2078,6 +2139,7 @@ type references struct { val interface{} defaultVal interface{} + valType string } func (r *references) hasValue() bool { @@ -2088,6 +2150,51 @@ func (r *references) hasEnvRef() bool { return r != nil && r.EnvRef != "" } +func (r *references) expression() ast.Expression { + v := r.val + if v == nil { + v = r.defaultVal + } + if v == nil { + return nil + } + + switch strings.ToLower(r.valType) { + case "bool", "booleanliteral": + return astBoolFromIface(v) + case "duration", "durationliteral": + return astDurationFromIface(v) + case "float", "floatliteral": + return astFloatFromIface(v) + case "int", "integerliteral": + return astIntegerFromIface(v) + case "string", "stringliteral": + return astStringFromIface(v) + case "time", "datetimeliteral": + if v == "now()" { + return astNow() + } + return astTimeFromIface(v) + } + return nil +} + +func (r *references) Float64() float64 { + if r == nil || r.val == nil { + return 0 + } + i, _ := r.val.(float64) + return i +} + +func (r *references) Int64() int64 { + if r == nil || r.val == nil { + return 0 + } + i, _ := r.val.(int64) + return i +} + func (r *references) String() string { if r == nil { return "" @@ -2120,14 +2227,86 @@ func (r *references) SecretField() influxdb.SecretField { } func convertRefToRefSummary(field string, ref *references) SummaryReference { + var valType string + switch strings.ToLower(ref.valType) { + case "bool", "booleanliteral": + valType = "bool" + case "duration", "durationliteral": + valType = "duration" + case "float", "floatliteral": + valType = "float" + case "int", "integerliteral": + valType = "integer" + case "string", "stringliteral": + valType = "string" + case "time", 
"datetimeliteral": + valType = "time" + } + return SummaryReference{ Field: field, EnvRefKey: ref.EnvRef, - Value: ref.StringVal(), + ValType: valType, + Value: ref.val, DefaultValue: ref.defaultVal, } } +func astBoolFromIface(v interface{}) *ast.BooleanLiteral { + b, _ := v.(bool) + return ast.BooleanLiteralFromValue(b) +} + +func astDurationFromIface(v interface{}) *ast.DurationLiteral { + s, ok := v.(string) + if !ok { + return nil + } + dur, _ := parser.ParseSignedDuration(s) + return dur +} + +func astFloatFromIface(v interface{}) *ast.FloatLiteral { + if i, ok := v.(int); ok { + return ast.FloatLiteralFromValue(float64(i)) + } + f, _ := v.(float64) + return ast.FloatLiteralFromValue(f) +} + +func astIntegerFromIface(v interface{}) *ast.IntegerLiteral { + if f, ok := v.(float64); ok { + return ast.IntegerLiteralFromValue(int64(f)) + } + i, _ := v.(int64) + return ast.IntegerLiteralFromValue(i) +} + +func astNow() *ast.CallExpression { + return &ast.CallExpression{ + Callee: &ast.Identifier{Name: "now"}, + } +} + +func astStringFromIface(v interface{}) *ast.StringLiteral { + s, _ := v.(string) + return ast.StringLiteralFromValue(s) +} + +func astTimeFromIface(v interface{}) *ast.DateTimeLiteral { + if t, ok := v.(time.Time); ok { + return ast.DateTimeLiteralFromValue(t) + } + + s, ok := v.(string) + if !ok { + return nil + } + + t, _ := parser.ParseTime(s) + return t +} + func isValidName(name string, minLength int) (validationErr, bool) { if len(name) >= minLength { return validationErr{}, true diff --git a/pkger/parser_test.go b/pkger/parser_test.go index aafe84e5004..91509665e20 100644 --- a/pkger/parser_test.go +++ b/pkger/parser_test.go @@ -2330,6 +2330,94 @@ spec: }) }) + t.Run("with params option should be parameterizable", func(t *testing.T) { + testfileRunner(t, "testdata/dashboard_params.yml", func(t *testing.T, template *Template) { + sum := template.Summary() + require.Len(t, sum.Dashboards, 1) + + actual := sum.Dashboards[0] + assert.Equal(t, KindDashboard, actual.Kind) + assert.Equal(t, "dash-1", actual.MetaName) + + require.Len(t, actual.Charts, 1) + actualChart := actual.Charts[0] + assert.Equal(t, 3, actualChart.Height) + assert.Equal(t, 6, actualChart.Width) + assert.Equal(t, 1, actualChart.XPosition) + assert.Equal(t, 2, actualChart.YPosition) + + props, ok := actualChart.Properties.(influxdb.SingleStatViewProperties) + require.True(t, ok) + assert.Equal(t, "single-stat", props.GetType()) + + require.Len(t, props.Queries, 1) + + queryText := `option params = { + bucket: "bar", + start: -24h0m0s, + stop: now(), + name: "max", + floatVal: 37.2, + minVal: 10, +} + +from(bucket: params.bucket) + |> range(start: params.start, end: params.stop) + |> filter(fn: (r) => + (r._measurement == "processes")) + |> filter(fn: (r) => + (r.floater == params.floatVal)) + |> filter(fn: (r) => + (r._value > params.minVal)) + |> aggregateWindow(every: v.windowPeriod, fn: max) + |> yield(name: params.name)` + + q := props.Queries[0] + assert.Equal(t, queryText, q.Text) + assert.Equal(t, "advanced", q.EditMode) + + expectedRefs := []SummaryReference{ + { + Field: "spec.charts[0].queries[0].params.bucket", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.bucket`, + ValType: "string", + DefaultValue: "bar", + }, + { + Field: "spec.charts[0].queries[0].params.floatVal", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.floatVal`, + ValType: "float", + DefaultValue: 37.2, + }, + { + Field: "spec.charts[0].queries[0].params.minVal", + EnvRefKey: 
`dashboards[dash-1].spec.charts[0].queries[0].params.minVal`, + ValType: "integer", + DefaultValue: int64(10), + }, + { + Field: "spec.charts[0].queries[0].params.name", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.name`, + ValType: "string", + DefaultValue: "max", + }, + { + Field: "spec.charts[0].queries[0].params.start", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.start`, + ValType: "duration", + DefaultValue: "-24h0m0s", + }, + { + Field: "spec.charts[0].queries[0].params.stop", + EnvRefKey: `dashboards[dash-1].spec.charts[0].queries[0].params.stop`, + ValType: "time", + DefaultValue: "now()", + }, + } + assert.Equal(t, expectedRefs, actual.EnvReferences) + }) + }) + t.Run("with env refs should be valid", func(t *testing.T) { testfileRunner(t, "testdata/dashboard_ref.yml", func(t *testing.T, template *Template) { actual := template.Summary().Dashboards diff --git a/pkger/service.go b/pkger/service.go index 797fa6289fa..c9c244c24c9 100644 --- a/pkger/service.go +++ b/pkger/service.go @@ -1968,7 +1968,7 @@ func (s *Service) rollbackDashboards(ctx context.Context, dashs []*stateDashboar return nil } -func convertChartsToCells(ch []chart) []*influxdb.Cell { +func convertChartsToCells(ch []*chart) []*influxdb.Cell { icells := make([]*influxdb.Cell, 0, len(ch)) for _, c := range ch { icell := &influxdb.Cell{ diff --git a/pkger/service_test.go b/pkger/service_test.go index e2d38944cbd..e5259cd522c 100644 --- a/pkger/service_test.go +++ b/pkger/service_test.go @@ -1962,13 +1962,10 @@ func TestService(t *testing.T) { }) newQuery := func() influxdb.DashboardQuery { - q := influxdb.DashboardQuery{ + return influxdb.DashboardQuery{ Text: "from(v.bucket) |> count()", EditMode: "advanced", } - // TODO: remove this when issue that forced the builder tag to be here to render in UI. 
- q.BuilderConfig.Tags = append(q.BuilderConfig.Tags, influxdb.NewBuilderTag("_measurement", "filter", "")) - return q } newAxes := func() map[string]influxdb.Axis { diff --git a/pkger/testdata/dashboard_params.yml b/pkger/testdata/dashboard_params.yml new file mode 100644 index 00000000000..ca883512162 --- /dev/null +++ b/pkger/testdata/dashboard_params.yml @@ -0,0 +1,49 @@ +apiVersion: influxdata.com/v2alpha1 +kind: Dashboard +metadata: + name: dash-1 +spec: + charts: + - kind: Single_Stat + name: single stat + xPos: 1 + yPos: 2 + width: 6 + height: 3 + queries: + - query: | + option params = { + bucket: "foo", + start: -1d, + stop: now(), + name: "max", + floatVal: 1.0, + minVal: 10 + } + + from(bucket: params.bucket) + |> range(start: params.start, end: params.stop) + |> filter(fn: (r) => r._measurement == "processes") + |> filter(fn: (r) => r.floater == params.floatVal) + |> filter(fn: (r) => r._value > params.minVal) + |> aggregateWindow(every: v.windowPeriod, fn: max) + |> yield(name: params.name) + params: + - key: bucket + default: "bar" + type: string + - key: start + type: duration + - key: stop + type: time + - key: floatVal + default: 37.2 + type: float + - key: minVal + type: int + - key: name # infer type + colors: + - name: laser + type: text + hex: "#8F8AF4" + value: 3 From 3882b391528576e5ee8ee3cfa195c7a7ce12333e Mon Sep 17 00:00:00 2001 From: Ariel Salem Date: Wed, 5 Aug 2020 04:52:30 -0700 Subject: [PATCH 05/13] fix(flashing-cells): prevent flashing cells by setting all the cell data when the dashboard is loaded (#19212) --- ui/src/dashboards/actions/thunks.ts | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/ui/src/dashboards/actions/thunks.ts b/ui/src/dashboards/actions/thunks.ts index 4b9f6bb185d..20bc83c789c 100644 --- a/ui/src/dashboards/actions/thunks.ts +++ b/ui/src/dashboards/actions/thunks.ts @@ -358,17 +358,25 @@ export const getDashboard = ( const cellViews: CellsWithViewProperties = resp.data.cells || [] const viewsData = viewsFromCells(cellViews, dashboardID) + setTimeout(() => { + const normCells = normalize( + cellViews, + arrayOfCells + ) - const normViews = normalize( - viewsData, - arrayOfViews - ) - - dispatch(setViews(RemoteDataState.Done, normViews)) + dispatch(setCells(dashboardID, RemoteDataState.Done, normCells)) + const normViews = normalize( + viewsData, + arrayOfViews + ) - // Now that all the necessary state has been loaded, set the dashboard - dispatch(creators.setDashboard(dashboardID, RemoteDataState.Done, normDash)) - dispatch(updateTimeRangeFromQueryParams(dashboardID)) + dispatch(setViews(RemoteDataState.Done, normViews)) + // Now that all the necessary state has been loaded, set the dashboard + dispatch( + creators.setDashboard(dashboardID, RemoteDataState.Done, normDash) + ) + dispatch(updateTimeRangeFromQueryParams(dashboardID)) + }, 0) } catch (error) { if (error.name === 'AbortError') { return From b484bfc34f2305ee661152b207a7b5c8d6df43eb Mon Sep 17 00:00:00 2001 From: Adrian Thurston Date: Wed, 5 Aug 2020 16:25:56 +0300 Subject: [PATCH 06/13] fix: run apt-get update before any apt-get install commands (#19220) It's possible that the apt cache on the circle ci system isn't up to date. Call apt-get update before any installs. 
---
 .circleci/config.yml | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 69e1624d45a..88429162e9e 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -7,7 +7,7 @@ commands:
     steps:
       - run:
           name: Install clang
-          command: sudo apt-get install -y --no-install-recommends clang musl-tools
+          command: sudo apt-get update && sudo apt-get install -y --no-install-recommends clang musl-tools
       - run:
           name: Install rust compiler
           command: |
@@ -22,13 +22,13 @@ commands:
       - run:
           name: Install linux cross compilers
           command: >
-            sudo apt-get install -y --no-install-recommends
-            gcc-arm-linux-gnueabihf libc6-dev-armhf-cross
-            gcc-aarch64-linux-gnu libc6-dev-arm64-cross
+            sudo apt-get update && sudo apt-get install -y --no-install-recommends
+            gcc-arm-linux-gnueabihf libc6-dev-armhf-cross
+            gcc-aarch64-linux-gnu libc6-dev-arm64-cross
       - run:
           name: Install macOS cross compilers
           command: |
-            sudo apt-get install -y --no-install-recommends \
+            sudo apt-get update && sudo apt-get install -y --no-install-recommends \
               cmake patch libxml2-dev libssl-dev zlib1g-dev
             sudo mkdir -p /opt/osxcross
             cd /opt
@@ -143,6 +143,7 @@ jobs:
           name: Restoring GOPATH/pkg/mod
           keys:
             - influxdb-gomod-{{ checksum "go.sum" }} # Just match the go.sum checksum cache.
+      - run: sudo apt-get update
       - run: sudo apt-get install -y netcat-openbsd
       - run: sudo apt-get install -y bzr
       - install_rust_compiler
@@ -289,7 +290,7 @@ jobs:
           name: Restoring GOPATH/pkg/mod
           keys:
             - influxdb-gomod-{{ checksum "go.sum" }} # Matches based on go.sum checksum.
-      - run: sudo apt-get install -y bzr
+      - run: sudo apt-get update && sudo apt-get install -y bzr
       - install_rust_compiler
       - run: mkdir -p $TEST_RESULTS
       - run: make test-go # This uses the test cache so it may succeed or fail quickly.
@@ -340,7 +341,7 @@ jobs:
     working_directory: /go/src/github.com/influxdata/influxdb
     steps:
       - checkout
-      - run: sudo apt-get install -y bzr
+      - run: sudo apt-get update && sudo apt-get install -y bzr
       - install_rust_compiler
       - run: mkdir -p $TEST_RESULTS
       - run: |
@@ -373,7 +374,7 @@ jobs:
     working_directory: /go/src/github.com/influxdata/influxdb
     steps:
       - checkout
-      - run: sudo apt-get install -y bzr
+      - run: sudo apt-get update && sudo apt-get install -y bzr
       - install_rust_compiler
       - run: make checkcommit
@@ -419,7 +420,7 @@ jobs:
       - run:
           name: "Docker Login"
           command: docker login -u "$QUAY_USER" -p $QUAY_PASS quay.io
-      - run: sudo apt-get install -y bzr
+      - run: sudo apt-get update && sudo apt-get install -y bzr
       - install_rust_compiler
       - install_release_tools
       - run: make protoc # installs protoc
@@ -456,7 +457,7 @@ jobs:
       - run:
           name: "Docker Login"
           command: docker login -u "$QUAY_USER" -p $QUAY_PASS quay.io
-      - run: sudo apt-get install -y bzr
+      - run: sudo apt-get update && sudo apt-get install -y bzr
       - install_rust_compiler
       - install_release_tools
       - run: make protoc # installs protoc

From d48dc690a6ab74902e7417ec8058365751dd077e Mon Sep 17 00:00:00 2001
From: Faith Chikwekwe
Date: Wed, 5 Aug 2020 07:40:26 -0700
Subject: [PATCH 07/13] feat(query/stdlib): add min and max to ReadGroup
 (#19158)

Enables the min and max aggregates for the ReadGroupAggregate pushdown
behind a feature flag.

Co-authored-by: Jonathan A. 
Sternberg --- cmd/influxd/launcher/query_test.go | 75 ++ flags.yml | 6 + kit/feature/list.go | 16 + query/stdlib/influxdata/influxdb/rules.go | 30 + .../stdlib/influxdata/influxdb/rules_test.go | 64 +- storage/flux/reader.go | 7 + storage/flux/table.gen.go | 723 ++++++++++++++---- storage/flux/table.gen.go.tmpl | 184 ++++- storage/flux/table.go | 8 +- storage/flux/table_test.go | 109 +++ 10 files changed, 1015 insertions(+), 207 deletions(-) diff --git a/cmd/influxd/launcher/query_test.go b/cmd/influxd/launcher/query_test.go index 809bcb474f0..47157d3a96f 100644 --- a/cmd/influxd/launcher/query_test.go +++ b/cmd/influxd/launcher/query_test.go @@ -2235,6 +2235,80 @@ from(bucket: v.bucket) ,result,table,kk,_value ,,0,kk0,32 ,,1,kk1,35 +`, + }, + { + name: "min group", + data: []string{ + "m0,k=k0,kk=kk0 f=0i 0", + "m0,k=k0,kk=kk1 f=1i 1000000000", + "m0,k=k0,kk=kk0 f=2i 2000000000", + "m0,k=k0,kk=kk1 f=3i 3000000000", + "m0,k=k0,kk=kk0 f=4i 4000000000", + "m0,k=k0,kk=kk1 f=5i 5000000000", + "m0,k=k0,kk=kk0 f=6i 6000000000", + "m0,k=k0,kk=kk1 f=5i 7000000000", + "m0,k=k0,kk=kk0 f=0i 8000000000", + "m0,k=k0,kk=kk1 f=6i 9000000000", + "m0,k=k0,kk=kk0 f=6i 10000000000", + "m0,k=k0,kk=kk1 f=7i 11000000000", + "m0,k=k0,kk=kk0 f=5i 12000000000", + "m0,k=k0,kk=kk1 f=8i 13000000000", + "m0,k=k0,kk=kk0 f=9i 14000000000", + "m0,k=k0,kk=kk1 f=5i 15000000000", + }, + op: "readGroup(min)", + query: ` +from(bucket: v.bucket) + |> range(start: 1970-01-01T00:00:00Z, stop: 1970-01-01T00:00:15Z) + |> group(columns: ["kk"]) + |> min() + |> keep(columns: ["kk", "_value"]) +`, + want: ` +#datatype,string,long,string,long +#group,false,false,true,false +#default,_result,,, +,result,table,kk,_value +,,0,kk0,0 +,,1,kk1,1 +`, + }, + { + name: "max group", + data: []string{ + "m0,k=k0,kk=kk0 f=0i 0", + "m0,k=k0,kk=kk1 f=1i 1000000000", + "m0,k=k0,kk=kk0 f=2i 2000000000", + "m0,k=k0,kk=kk1 f=3i 3000000000", + "m0,k=k0,kk=kk0 f=4i 4000000000", + "m0,k=k0,kk=kk1 f=5i 5000000000", + "m0,k=k0,kk=kk0 f=6i 6000000000", + "m0,k=k0,kk=kk1 f=5i 7000000000", + "m0,k=k0,kk=kk0 f=0i 8000000000", + "m0,k=k0,kk=kk1 f=6i 9000000000", + "m0,k=k0,kk=kk0 f=6i 10000000000", + "m0,k=k0,kk=kk1 f=7i 11000000000", + "m0,k=k0,kk=kk0 f=5i 12000000000", + "m0,k=k0,kk=kk1 f=8i 13000000000", + "m0,k=k0,kk=kk0 f=9i 14000000000", + "m0,k=k0,kk=kk1 f=5i 15000000000", + }, + op: "readGroup(max)", + query: ` +from(bucket: v.bucket) + |> range(start: 1970-01-01T00:00:00Z, stop: 1970-01-01T00:00:15Z) + |> group(columns: ["kk"]) + |> max() + |> keep(columns: ["kk", "_value"]) +`, + want: ` +#datatype,string,long,string,long +#group,false,false,true,false +#default,_result,,, +,result,table,kk,_value +,,0,kk0,9 +,,1,kk1,8 `, }, } @@ -2247,6 +2321,7 @@ from(bucket: v.bucket) feature.PushDownWindowAggregateMean(): true, feature.PushDownWindowAggregateMin(): true, feature.PushDownWindowAggregateMax(): true, + feature.PushDownGroupAggregateMinMax(): true, })) l.SetupOrFail(t) diff --git a/flags.yml b/flags.yml index 11cc8b3b5da..725d9d2bc78 100644 --- a/flags.yml +++ b/flags.yml @@ -151,3 +151,9 @@ contact: Monitoring Team lifetime: temporary expose: true + +- name: Push Down Group Aggregate Min Max + description: Enable the min and max variants of the PushDownGroupAggregate planner rule + key: pushDownGroupAggregateMinMax + default: false + contact: Query Team diff --git a/kit/feature/list.go b/kit/feature/list.go index 67c84a92160..66922fb35af 100644 --- a/kit/feature/list.go +++ b/kit/feature/list.go @@ -282,6 +282,20 @@ func Notebooks() BoolFlag { return 
notebooks } +var pushDownGroupAggregateMinMax = MakeBoolFlag( + "Push Down Group Aggregate Min Max", + "pushDownGroupAggregateMinMax", + "Query Team", + false, + Temporary, + false, +) + +// PushDownGroupAggregateMinMax - Enable the min and max variants of the PushDownGroupAggregate planner rule +func PushDownGroupAggregateMinMax() BoolFlag { + return pushDownGroupAggregateMinMax +} + var all = []Flag{ appMetrics, backendExample, @@ -303,6 +317,7 @@ var all = []Flag{ useUserPermission, mergeFiltersRule, notebooks, + pushDownGroupAggregateMinMax, } var byKey = map[string]Flag{ @@ -326,4 +341,5 @@ var byKey = map[string]Flag{ "useUserPermission": useUserPermission, "mergeFiltersRule": mergeFiltersRule, "notebooks": notebooks, + "pushDownGroupAggregateMinMax": pushDownGroupAggregateMinMax, } diff --git a/query/stdlib/influxdata/influxdb/rules.go b/query/stdlib/influxdata/influxdb/rules.go index 62ca204fb0d..ac2f2308663 100644 --- a/query/stdlib/influxdata/influxdb/rules.go +++ b/query/stdlib/influxdata/influxdb/rules.go @@ -1023,6 +1023,8 @@ func (rule PushDownGroupAggregateRule) Pattern() plan.Pattern { universe.SumKind, universe.FirstKind, universe.LastKind, + universe.MinKind, + universe.MaxKind, }, plan.Pat(ReadGroupPhysKind)) } @@ -1075,6 +1077,28 @@ func (PushDownGroupAggregateRule) Rewrite(ctx context.Context, pn plan.Node) (pl AggregateMethod: universe.LastKind, }) return node, true, nil + case universe.MinKind: + // ReadGroup() -> min => ReadGroup(min) + if feature.PushDownGroupAggregateMinMax().Enabled(ctx) { + node := plan.CreatePhysicalNode("ReadGroupAggregate", &ReadGroupPhysSpec{ + ReadRangePhysSpec: group.ReadRangePhysSpec, + GroupMode: group.GroupMode, + GroupKeys: group.GroupKeys, + AggregateMethod: universe.MinKind, + }) + return node, true, nil + } + case universe.MaxKind: + // ReadGroup() -> max => ReadGroup(max) + if feature.PushDownGroupAggregateMinMax().Enabled(ctx) { + node := plan.CreatePhysicalNode("ReadGroupAggregate", &ReadGroupPhysSpec{ + ReadRangePhysSpec: group.ReadRangePhysSpec, + GroupMode: group.GroupMode, + GroupKeys: group.GroupKeys, + AggregateMethod: universe.MaxKind, + }) + return node, true, nil + } } return pn, false, nil } @@ -1102,6 +1126,12 @@ func canPushGroupedAggregate(ctx context.Context, pn plan.Node) bool { case universe.LastKind: agg := pn.ProcedureSpec().(*universe.LastProcedureSpec) return caps.HaveLast() && agg.Column == execute.DefaultValueColLabel + case universe.MaxKind: + agg := pn.ProcedureSpec().(*universe.MaxProcedureSpec) + return caps.HaveMax() && agg.Column == execute.DefaultValueColLabel + case universe.MinKind: + agg := pn.ProcedureSpec().(*universe.MinProcedureSpec) + return caps.HaveMin() && agg.Column == execute.DefaultValueColLabel } return false } diff --git a/query/stdlib/influxdata/influxdb/rules_test.go b/query/stdlib/influxdata/influxdb/rules_test.go index 39c7738ac4b..38553daaeeb 100644 --- a/query/stdlib/influxdata/influxdb/rules_test.go +++ b/query/stdlib/influxdata/influxdb/rules_test.go @@ -2672,7 +2672,9 @@ func TestPushDownBareAggregateRule(t *testing.T) { // func TestPushDownGroupAggregateRule(t *testing.T) { // Turn on all flags - ctx, _ := feature.Annotate(context.Background(), mock.NewFlagger(map[feature.Flag]interface{}{})) + ctx, _ := feature.Annotate(context.Background(), mock.NewFlagger(map[feature.Flag]interface{}{ + feature.PushDownGroupAggregateMinMax(): true, + })) caps := func(c query.GroupCapability) context.Context { deps := influxdb.StorageDependencies{ @@ -2726,6 +2728,20 @@ func 
TestPushDownGroupAggregateRule(t *testing.T) { }, } } + minProcedureSpecVal := func() *universe.MinProcedureSpec { + return &universe.MinProcedureSpec{ + SelectorConfig: execute.SelectorConfig{ + Column: execute.DefaultValueColLabel, + }, + } + } + maxProcedureSpecVal := func() *universe.MaxProcedureSpec { + return &universe.MaxProcedureSpec{ + SelectorConfig: execute.SelectorConfig{ + Column: execute.DefaultValueColLabel, + }, + } + } countProcedureSpec := func() *universe.CountProcedureSpec { return &universe.CountProcedureSpec{ AggregateConfig: execute.DefaultAggregateConfig, @@ -2829,12 +2845,56 @@ func TestPushDownGroupAggregateRule(t *testing.T) { // ReadGroup() -> last => ReadGroup() -> last tests = append(tests, plantest.RuleTestCase{ Context: caps(mockGroupCapability{}), - Name: "RewriteGroupLast", + Name: "NoLastCapability", Rules: []plan.Rule{influxdb.PushDownGroupAggregateRule{}}, Before: simplePlanWithAgg("last", lastProcedureSpec()), NoChange: true, }) + // ReadGroup() -> max => ReadGroup(max) + tests = append(tests, plantest.RuleTestCase{ + Context: caps(mockGroupCapability{max: true}), + Name: "RewriteGroupMax", + Rules: []plan.Rule{influxdb.PushDownGroupAggregateRule{}}, + Before: simplePlanWithAgg("max", maxProcedureSpecVal()), + After: &plantest.PlanSpec{ + Nodes: []plan.Node{ + plan.CreateLogicalNode("ReadGroupAggregate", readGroupAgg("max")), + }, + }, + }) + + // ReadGroup() -> max => ReadGroup() -> max + tests = append(tests, plantest.RuleTestCase{ + Context: caps(mockGroupCapability{}), + Name: "NoMaxCapability", + Rules: []plan.Rule{influxdb.PushDownGroupAggregateRule{}}, + Before: simplePlanWithAgg("max", maxProcedureSpecVal()), + NoChange: true, + }) + + // ReadGroup() -> min => ReadGroup(min) + tests = append(tests, plantest.RuleTestCase{ + Context: caps(mockGroupCapability{min: true}), + Name: "RewriteGroupMin", + Rules: []plan.Rule{influxdb.PushDownGroupAggregateRule{}}, + Before: simplePlanWithAgg("min", minProcedureSpecVal()), + After: &plantest.PlanSpec{ + Nodes: []plan.Node{ + plan.CreateLogicalNode("ReadGroupAggregate", readGroupAgg("min")), + }, + }, + }) + + // ReadGroup() -> min => ReadGroup() -> min + tests = append(tests, plantest.RuleTestCase{ + Context: caps(mockGroupCapability{}), + Name: "NoMinCapability", + Rules: []plan.Rule{influxdb.PushDownGroupAggregateRule{}}, + Before: simplePlanWithAgg("min", minProcedureSpecVal()), + NoChange: true, + }) + // Rewrite with successors // ReadGroup() -> count -> sum {2} => ReadGroup(count) -> sum {2} tests = append(tests, plantest.RuleTestCase{ diff --git a/storage/flux/reader.go b/storage/flux/reader.go index 9b10519d2fd..13a94cdf5a0 100644 --- a/storage/flux/reader.go +++ b/storage/flux/reader.go @@ -11,6 +11,7 @@ import ( "github.com/influxdata/flux/memory" "github.com/influxdata/flux/plan" "github.com/influxdata/flux/values" + "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/kit/errors" "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/query" @@ -273,6 +274,12 @@ func (gi *groupIterator) Do(f func(flux.Table) error) error { req.Range.Start = int64(gi.spec.Bounds.Start) req.Range.End = int64(gi.spec.Bounds.Stop) + if len(gi.spec.GroupKeys) > 0 && gi.spec.GroupMode == query.GroupModeNone { + return &influxdb.Error{ + Code: influxdb.EInternal, + Msg: "cannot have group mode none with group key values", + } + } req.Group = convertGroupMode(gi.spec.GroupMode) req.GroupKeys = gi.spec.GroupKeys diff --git a/storage/flux/table.gen.go b/storage/flux/table.gen.go 
index 6f9046afc5a..bd0b0b9449b 100644 --- a/storage/flux/table.gen.go +++ b/storage/flux/table.gen.go @@ -7,6 +7,7 @@ package storageflux import ( + "fmt" "math" "sync" @@ -746,49 +747,29 @@ func (t *floatGroupTable) advance() bool { return true } - // handle the group with aggregate case - var value float64 - // For group count, sum, min, and max, the timestamp here is always math.MaxInt64. - // their final result does not contain _time, so this timestamp value can be anything - // and it won't matter. - // For group first, we need to assign the initial value to math.MaxInt64 so - // we can find the row with the smallest timestamp. - // Do not worry about data with math.MaxInt64 as its real timestamp. - // In OSS we require a |> range() call in the query and a math.MaxInt64 timestamp - // cannot make it through. - var timestamp int64 = math.MaxInt64 - if t.gc.Aggregate().Type == datatypes.AggregateTypeLast { - timestamp = math.MinInt64 + aggregate, err := determineFloatAggregateMethod(t.gc.Aggregate().Type) + if err != nil { + t.err = err + return false } + + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps, values := []int64{ts}, []float64{v} for { - // note that for the group aggregate case, len here should always be 1 - for i := 0; i < len; i++ { - switch t.gc.Aggregate().Type { - case datatypes.AggregateTypeCount: - panic("unsupported for aggregate count: Float") - case datatypes.AggregateTypeSum: - value += arr.Values[i] - case datatypes.AggregateTypeFirst: - if arr.Timestamps[i] < timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - case datatypes.AggregateTypeLast: - if arr.Timestamps[i] > timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - } - } arr = t.cur.Next() - len = arr.Len() - if len > 0 { + if arr.Len() > 0 { + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps = append(timestamps, ts) + values = append(values, v) continue } + if !t.advanceCursor() { break } } + timestamp, value := aggregate(timestamps, values) + colReader := t.allocateBuffer(1) if IsSelector(t.gc.Aggregate()) { colReader.cols[timeColIdx] = arrow.NewInt([]int64{timestamp}, t.alloc) @@ -801,6 +782,113 @@ func (t *floatGroupTable) advance() bool { return true } +type floatAggregateMethod func([]int64, []float64) (int64, float64) + +// determineFloatAggregateMethod returns the method for aggregating +// returned points within the same group. The incoming points are the +// ones returned for each series and the method returned here will +// aggregate the aggregates. 
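+// Unsupported aggregates for this column type (count, for floats) and
+// unknown aggregate kinds are reported as an *influxdb.Error rather
+// than a panic.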
+func determineFloatAggregateMethod(agg datatypes.Aggregate_AggregateType) (floatAggregateMethod, error) { + switch agg { + case datatypes.AggregateTypeFirst: + return aggregateFirstGroupsFloat, nil + case datatypes.AggregateTypeLast: + return aggregateLastGroupsFloat, nil + case datatypes.AggregateTypeCount: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate count: Float", + } + + case datatypes.AggregateTypeSum: + + return aggregateSumGroupsFloat, nil + + case datatypes.AggregateTypeMin: + + return aggregateMinGroupsFloat, nil + + case datatypes.AggregateTypeMax: + + return aggregateMaxGroupsFloat, nil + + default: + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("unknown/unimplemented aggregate type: %v", agg), + } + } +} + +func aggregateMinGroupsFloat(timestamps []int64, values []float64) (int64, float64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value > values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateMaxGroupsFloat(timestamps []int64, values []float64) (int64, float64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value < values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +// For group count and sum, the timestamp here is always math.MaxInt64. +// their final result does not contain _time, so this timestamp value can be anything +// and it won't matter. + +func aggregateSumGroupsFloat(_ []int64, values []float64) (int64, float64) { + var sum float64 + for _, v := range values { + sum += v + } + return math.MaxInt64, sum +} + +func aggregateFirstGroupsFloat(timestamps []int64, values []float64) (int64, float64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp > timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateLastGroupsFloat(timestamps []int64, values []float64) (int64, float64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp < timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + func (t *floatGroupTable) advanceCursor() bool { t.cur.Close() t.cur = nil @@ -1567,49 +1655,29 @@ func (t *integerGroupTable) advance() bool { return true } - // handle the group with aggregate case - var value int64 - // For group count, sum, min, and max, the timestamp here is always math.MaxInt64. - // their final result does not contain _time, so this timestamp value can be anything - // and it won't matter. - // For group first, we need to assign the initial value to math.MaxInt64 so - // we can find the row with the smallest timestamp. - // Do not worry about data with math.MaxInt64 as its real timestamp. - // In OSS we require a |> range() call in the query and a math.MaxInt64 timestamp - // cannot make it through. 
- var timestamp int64 = math.MaxInt64 - if t.gc.Aggregate().Type == datatypes.AggregateTypeLast { - timestamp = math.MinInt64 + aggregate, err := determineIntegerAggregateMethod(t.gc.Aggregate().Type) + if err != nil { + t.err = err + return false } + + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps, values := []int64{ts}, []int64{v} for { - // note that for the group aggregate case, len here should always be 1 - for i := 0; i < len; i++ { - switch t.gc.Aggregate().Type { - case datatypes.AggregateTypeCount: - fallthrough - case datatypes.AggregateTypeSum: - value += arr.Values[i] - case datatypes.AggregateTypeFirst: - if arr.Timestamps[i] < timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - case datatypes.AggregateTypeLast: - if arr.Timestamps[i] > timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - } - } arr = t.cur.Next() - len = arr.Len() - if len > 0 { + if arr.Len() > 0 { + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps = append(timestamps, ts) + values = append(values, v) continue } + if !t.advanceCursor() { break } } + timestamp, value := aggregate(timestamps, values) + colReader := t.allocateBuffer(1) if IsSelector(t.gc.Aggregate()) { colReader.cols[timeColIdx] = arrow.NewInt([]int64{timestamp}, t.alloc) @@ -1622,6 +1690,114 @@ func (t *integerGroupTable) advance() bool { return true } +type integerAggregateMethod func([]int64, []int64) (int64, int64) + +// determineIntegerAggregateMethod returns the method for aggregating +// returned points within the same group. The incoming points are the +// ones returned for each series and the method returned here will +// aggregate the aggregates. +func determineIntegerAggregateMethod(agg datatypes.Aggregate_AggregateType) (integerAggregateMethod, error) { + switch agg { + case datatypes.AggregateTypeFirst: + return aggregateFirstGroupsInteger, nil + case datatypes.AggregateTypeLast: + return aggregateLastGroupsInteger, nil + case datatypes.AggregateTypeCount: + + return aggregateCountGroupsInteger, nil + + case datatypes.AggregateTypeSum: + + return aggregateSumGroupsInteger, nil + + case datatypes.AggregateTypeMin: + + return aggregateMinGroupsInteger, nil + + case datatypes.AggregateTypeMax: + + return aggregateMaxGroupsInteger, nil + + default: + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("unknown/unimplemented aggregate type: %v", agg), + } + } +} + +func aggregateMinGroupsInteger(timestamps []int64, values []int64) (int64, int64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value > values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateMaxGroupsInteger(timestamps []int64, values []int64) (int64, int64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value < values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +// For group count and sum, the timestamp here is always math.MaxInt64. +// their final result does not contain _time, so this timestamp value can be anything +// and it won't matter. 
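+// For integer columns, count is aggregated by summing the per-series
+// counts (see aggregateCountGroupsInteger below).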
+ +func aggregateCountGroupsInteger(timestamps []int64, values []int64) (int64, int64) { + return aggregateSumGroupsInteger(timestamps, values) +} + +func aggregateSumGroupsInteger(_ []int64, values []int64) (int64, int64) { + var sum int64 + for _, v := range values { + sum += v + } + return math.MaxInt64, sum +} + +func aggregateFirstGroupsInteger(timestamps []int64, values []int64) (int64, int64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp > timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateLastGroupsInteger(timestamps []int64, values []int64) (int64, int64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp < timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + func (t *integerGroupTable) advanceCursor() bool { t.cur.Close() t.cur = nil @@ -2386,49 +2562,29 @@ func (t *unsignedGroupTable) advance() bool { return true } - // handle the group with aggregate case - var value uint64 - // For group count, sum, min, and max, the timestamp here is always math.MaxInt64. - // their final result does not contain _time, so this timestamp value can be anything - // and it won't matter. - // For group first, we need to assign the initial value to math.MaxInt64 so - // we can find the row with the smallest timestamp. - // Do not worry about data with math.MaxInt64 as its real timestamp. - // In OSS we require a |> range() call in the query and a math.MaxInt64 timestamp - // cannot make it through. - var timestamp int64 = math.MaxInt64 - if t.gc.Aggregate().Type == datatypes.AggregateTypeLast { - timestamp = math.MinInt64 + aggregate, err := determineUnsignedAggregateMethod(t.gc.Aggregate().Type) + if err != nil { + t.err = err + return false } + + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps, values := []int64{ts}, []uint64{v} for { - // note that for the group aggregate case, len here should always be 1 - for i := 0; i < len; i++ { - switch t.gc.Aggregate().Type { - case datatypes.AggregateTypeCount: - panic("unsupported for aggregate count: Unsigned") - case datatypes.AggregateTypeSum: - value += arr.Values[i] - case datatypes.AggregateTypeFirst: - if arr.Timestamps[i] < timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - case datatypes.AggregateTypeLast: - if arr.Timestamps[i] > timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - } - } arr = t.cur.Next() - len = arr.Len() - if len > 0 { + if arr.Len() > 0 { + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps = append(timestamps, ts) + values = append(values, v) continue } + if !t.advanceCursor() { break } } + timestamp, value := aggregate(timestamps, values) + colReader := t.allocateBuffer(1) if IsSelector(t.gc.Aggregate()) { colReader.cols[timeColIdx] = arrow.NewInt([]int64{timestamp}, t.alloc) @@ -2441,6 +2597,113 @@ func (t *unsignedGroupTable) advance() bool { return true } +type unsignedAggregateMethod func([]int64, []uint64) (int64, uint64) + +// determineUnsignedAggregateMethod returns the method for aggregating +// returned points within the same group. The incoming points are the +// ones returned for each series and the method returned here will +// aggregate the aggregates. 
+func determineUnsignedAggregateMethod(agg datatypes.Aggregate_AggregateType) (unsignedAggregateMethod, error) { + switch agg { + case datatypes.AggregateTypeFirst: + return aggregateFirstGroupsUnsigned, nil + case datatypes.AggregateTypeLast: + return aggregateLastGroupsUnsigned, nil + case datatypes.AggregateTypeCount: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate count: Unsigned", + } + + case datatypes.AggregateTypeSum: + + return aggregateSumGroupsUnsigned, nil + + case datatypes.AggregateTypeMin: + + return aggregateMinGroupsUnsigned, nil + + case datatypes.AggregateTypeMax: + + return aggregateMaxGroupsUnsigned, nil + + default: + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("unknown/unimplemented aggregate type: %v", agg), + } + } +} + +func aggregateMinGroupsUnsigned(timestamps []int64, values []uint64) (int64, uint64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value > values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateMaxGroupsUnsigned(timestamps []int64, values []uint64) (int64, uint64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value < values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +// For group count and sum, the timestamp here is always math.MaxInt64. +// their final result does not contain _time, so this timestamp value can be anything +// and it won't matter. + +func aggregateSumGroupsUnsigned(_ []int64, values []uint64) (int64, uint64) { + var sum uint64 + for _, v := range values { + sum += v + } + return math.MaxInt64, sum +} + +func aggregateFirstGroupsUnsigned(timestamps []int64, values []uint64) (int64, uint64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp > timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateLastGroupsUnsigned(timestamps []int64, values []uint64) (int64, uint64) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp < timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + func (t *unsignedGroupTable) advanceCursor() bool { t.cur.Close() t.cur = nil @@ -3205,49 +3468,29 @@ func (t *stringGroupTable) advance() bool { return true } - // handle the group with aggregate case - var value string - // For group count, sum, min, and max, the timestamp here is always math.MaxInt64. - // their final result does not contain _time, so this timestamp value can be anything - // and it won't matter. - // For group first, we need to assign the initial value to math.MaxInt64 so - // we can find the row with the smallest timestamp. - // Do not worry about data with math.MaxInt64 as its real timestamp. - // In OSS we require a |> range() call in the query and a math.MaxInt64 timestamp - // cannot make it through. 
- var timestamp int64 = math.MaxInt64 - if t.gc.Aggregate().Type == datatypes.AggregateTypeLast { - timestamp = math.MinInt64 + aggregate, err := determineStringAggregateMethod(t.gc.Aggregate().Type) + if err != nil { + t.err = err + return false } + + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps, values := []int64{ts}, []string{v} for { - // note that for the group aggregate case, len here should always be 1 - for i := 0; i < len; i++ { - switch t.gc.Aggregate().Type { - case datatypes.AggregateTypeCount: - panic("unsupported for aggregate count: String") - case datatypes.AggregateTypeSum: - panic("unsupported for aggregate sum: String") - case datatypes.AggregateTypeFirst: - if arr.Timestamps[i] < timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - case datatypes.AggregateTypeLast: - if arr.Timestamps[i] > timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - } - } arr = t.cur.Next() - len = arr.Len() - if len > 0 { + if arr.Len() > 0 { + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps = append(timestamps, ts) + values = append(values, v) continue } + if !t.advanceCursor() { break } } + timestamp, value := aggregate(timestamps, values) + colReader := t.allocateBuffer(1) if IsSelector(t.gc.Aggregate()) { colReader.cols[timeColIdx] = arrow.NewInt([]int64{timestamp}, t.alloc) @@ -3260,6 +3503,86 @@ func (t *stringGroupTable) advance() bool { return true } +type stringAggregateMethod func([]int64, []string) (int64, string) + +// determineStringAggregateMethod returns the method for aggregating +// returned points within the same group. The incoming points are the +// ones returned for each series and the method returned here will +// aggregate the aggregates. +func determineStringAggregateMethod(agg datatypes.Aggregate_AggregateType) (stringAggregateMethod, error) { + switch agg { + case datatypes.AggregateTypeFirst: + return aggregateFirstGroupsString, nil + case datatypes.AggregateTypeLast: + return aggregateLastGroupsString, nil + case datatypes.AggregateTypeCount: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate count: String", + } + + case datatypes.AggregateTypeSum: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate sum: String", + } + + case datatypes.AggregateTypeMin: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate min: String", + } + + case datatypes.AggregateTypeMax: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate max: String", + } + + default: + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("unknown/unimplemented aggregate type: %v", agg), + } + } +} + +// For group count and sum, the timestamp here is always math.MaxInt64. +// their final result does not contain _time, so this timestamp value can be anything +// and it won't matter. 
+ +func aggregateFirstGroupsString(timestamps []int64, values []string) (int64, string) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp > timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateLastGroupsString(timestamps []int64, values []string) (int64, string) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp < timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + func (t *stringGroupTable) advanceCursor() bool { t.cur.Close() t.cur = nil @@ -4024,49 +4347,29 @@ func (t *booleanGroupTable) advance() bool { return true } - // handle the group with aggregate case - var value bool - // For group count, sum, min, and max, the timestamp here is always math.MaxInt64. - // their final result does not contain _time, so this timestamp value can be anything - // and it won't matter. - // For group first, we need to assign the initial value to math.MaxInt64 so - // we can find the row with the smallest timestamp. - // Do not worry about data with math.MaxInt64 as its real timestamp. - // In OSS we require a |> range() call in the query and a math.MaxInt64 timestamp - // cannot make it through. - var timestamp int64 = math.MaxInt64 - if t.gc.Aggregate().Type == datatypes.AggregateTypeLast { - timestamp = math.MinInt64 + aggregate, err := determineBooleanAggregateMethod(t.gc.Aggregate().Type) + if err != nil { + t.err = err + return false } + + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps, values := []int64{ts}, []bool{v} for { - // note that for the group aggregate case, len here should always be 1 - for i := 0; i < len; i++ { - switch t.gc.Aggregate().Type { - case datatypes.AggregateTypeCount: - panic("unsupported for aggregate count: Boolean") - case datatypes.AggregateTypeSum: - panic("unsupported for aggregate sum: Boolean") - case datatypes.AggregateTypeFirst: - if arr.Timestamps[i] < timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - case datatypes.AggregateTypeLast: - if arr.Timestamps[i] > timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - } - } arr = t.cur.Next() - len = arr.Len() - if len > 0 { + if arr.Len() > 0 { + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps = append(timestamps, ts) + values = append(values, v) continue } + if !t.advanceCursor() { break } } + timestamp, value := aggregate(timestamps, values) + colReader := t.allocateBuffer(1) if IsSelector(t.gc.Aggregate()) { colReader.cols[timeColIdx] = arrow.NewInt([]int64{timestamp}, t.alloc) @@ -4079,6 +4382,86 @@ func (t *booleanGroupTable) advance() bool { return true } +type booleanAggregateMethod func([]int64, []bool) (int64, bool) + +// determineBooleanAggregateMethod returns the method for aggregating +// returned points within the same group. The incoming points are the +// ones returned for each series and the method returned here will +// aggregate the aggregates. 
+func determineBooleanAggregateMethod(agg datatypes.Aggregate_AggregateType) (booleanAggregateMethod, error) { + switch agg { + case datatypes.AggregateTypeFirst: + return aggregateFirstGroupsBoolean, nil + case datatypes.AggregateTypeLast: + return aggregateLastGroupsBoolean, nil + case datatypes.AggregateTypeCount: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate count: Boolean", + } + + case datatypes.AggregateTypeSum: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate sum: Boolean", + } + + case datatypes.AggregateTypeMin: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate min: Boolean", + } + + case datatypes.AggregateTypeMax: + + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate max: Boolean", + } + + default: + return nil, &influxdb.Error{ + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("unknown/unimplemented aggregate type: %v", agg), + } + } +} + +// For group count and sum, the timestamp here is always math.MaxInt64. +// their final result does not contain _time, so this timestamp value can be anything +// and it won't matter. + +func aggregateFirstGroupsBoolean(timestamps []int64, values []bool) (int64, bool) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp > timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateLastGroupsBoolean(timestamps []int64, values []bool) (int64, bool) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp < timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + func (t *booleanGroupTable) advanceCursor() bool { t.cur.Close() t.cur = nil diff --git a/storage/flux/table.gen.go.tmpl b/storage/flux/table.gen.go.tmpl index 18c13bca1cb..5df88708086 100644 --- a/storage/flux/table.gen.go.tmpl +++ b/storage/flux/table.gen.go.tmpl @@ -1,6 +1,7 @@ package storageflux import ( + "fmt" "math" "sync" @@ -742,49 +743,29 @@ func (t *{{.name}}GroupTable) advance() bool { return true } - // handle the group with aggregate case - var value {{.Type}} - // For group count, sum, min, and max, the timestamp here is always math.MaxInt64. - // their final result does not contain _time, so this timestamp value can be anything - // and it won't matter. - // For group first, we need to assign the initial value to math.MaxInt64 so - // we can find the row with the smallest timestamp. - // Do not worry about data with math.MaxInt64 as its real timestamp. - // In OSS we require a |> range() call in the query and a math.MaxInt64 timestamp - // cannot make it through. 
- var timestamp int64 = math.MaxInt64 - if t.gc.Aggregate().Type == datatypes.AggregateTypeLast { - timestamp = math.MinInt64 + aggregate, err := determine{{.Name}}AggregateMethod(t.gc.Aggregate().Type) + if err != nil { + t.err = err + return false } + + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps, values := []int64{ts}, []{{.Type}}{v} for { - // note that for the group aggregate case, len here should always be 1 - for i := 0; i < len; i++ { - switch t.gc.Aggregate().Type { - case datatypes.AggregateTypeCount: - {{if eq .Name "Integer"}}fallthrough{{else}}panic("unsupported for aggregate count: {{.Name}}"){{end}} - case datatypes.AggregateTypeSum: - {{if or (eq .Name "String") (eq .Name "Boolean")}}panic("unsupported for aggregate sum: {{.Name}}"){{else}}value += arr.Values[i]{{end}} - case datatypes.AggregateTypeFirst: - if arr.Timestamps[i] < timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - case datatypes.AggregateTypeLast: - if arr.Timestamps[i] > timestamp { - timestamp = arr.Timestamps[i] - value = arr.Values[i] - } - } - } arr = t.cur.Next() - len = arr.Len() - if len > 0 { + if arr.Len() > 0 { + ts, v := aggregate(arr.Timestamps, arr.Values) + timestamps = append(timestamps, ts) + values = append(values, v) continue } + if !t.advanceCursor() { break } } + timestamp, value := aggregate(timestamps, values) + colReader := t.allocateBuffer(1) if IsSelector(t.gc.Aggregate()) { colReader.cols[timeColIdx] = arrow.NewInt([]int64{timestamp}, t.alloc) @@ -797,6 +778,141 @@ func (t *{{.name}}GroupTable) advance() bool { return true } +type {{.name}}AggregateMethod func([]int64, []{{.Type}}) (int64, {{.Type}}) + +// determine{{.Name}}AggregateMethod returns the method for aggregating +// returned points within the same group. The incoming points are the +// ones returned for each series and the method returned here will +// aggregate the aggregates. 
+func determine{{.Name}}AggregateMethod(agg datatypes.Aggregate_AggregateType) ({{.name}}AggregateMethod, error){ + switch agg { + case datatypes.AggregateTypeFirst: + return aggregateFirstGroups{{.Name}}, nil + case datatypes.AggregateTypeLast: + return aggregateLastGroups{{.Name}}, nil + case datatypes.AggregateTypeCount: + {{if eq .Name "Integer"}} + return aggregateCountGroups{{.Name}}, nil + {{else}} + return nil, &influxdb.Error { + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate count: {{.Name}}", + } + {{end}} + case datatypes.AggregateTypeSum: + {{if and (ne .Name "Boolean") (ne .Name "String")}} + return aggregateSumGroups{{.Name}}, nil + {{else}} + return nil, &influxdb.Error { + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate sum: {{.Name}}", + } + {{end}} + case datatypes.AggregateTypeMin: + {{if and (ne .Name "Boolean") (ne .Name "String")}} + return aggregateMinGroups{{.Name}}, nil + {{else}} + return nil, &influxdb.Error { + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate min: {{.Name}}", + } + {{end}} + case datatypes.AggregateTypeMax: + {{if and (ne .Name "Boolean") (ne .Name "String")}} + return aggregateMaxGroups{{.Name}}, nil + {{else}} + return nil, &influxdb.Error { + Code: influxdb.EInvalid, + Msg: "unsupported for aggregate max: {{.Name}}", + } + {{end}} + default: + return nil, &influxdb.Error { + Code: influxdb.EInvalid, + Msg: fmt.Sprintf("unknown/unimplemented aggregate type: %v", agg), + } + } +} + +{{if and (ne .Name "Boolean") (ne .Name "String")}} +func aggregateMinGroups{{.Name}}(timestamps []int64, values []{{.Type}}) (int64, {{.Type}}) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value > values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} +{{end}} + +{{if and (ne .Name "Boolean") (ne .Name "String")}} +func aggregateMaxGroups{{.Name}}(timestamps []int64, values []{{.Type}}) (int64, {{.Type}}) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if value < values[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} +{{end}} + +// For group count and sum, the timestamp here is always math.MaxInt64. +// their final result does not contain _time, so this timestamp value can be anything +// and it won't matter. 
+{{if eq .Name "Integer"}} +func aggregateCountGroups{{.Name}}(timestamps []int64, values []{{.Type}}) (int64, {{.Type}}) { + return aggregateSumGroups{{.Name}}(timestamps, values) +} +{{end}} + +{{if and (ne .Name "Boolean") (ne .Name "String")}} +func aggregateSumGroups{{.Name}}(_ []int64, values []{{.Type}}) (int64, {{.Type}}) { + var sum {{.Type}} + for _, v := range values { + sum += v + } + return math.MaxInt64, sum +} +{{end}} + +func aggregateFirstGroups{{.Name}}(timestamps []int64, values []{{.Type}}) (int64, {{.Type}}) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp > timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + +func aggregateLastGroups{{.Name}}(timestamps []int64, values []{{.Type}}) (int64, {{.Type}}) { + value := values[0] + timestamp := timestamps[0] + + for i := 1; i < len(values); i++ { + if timestamp < timestamps[i] { + value = values[i] + timestamp = timestamps[i] + } + } + + return timestamp, value +} + func (t *{{.name}}GroupTable) advanceCursor() bool { t.cur.Close() t.cur = nil diff --git a/storage/flux/table.go b/storage/flux/table.go index ae921d91cc4..51738024474 100644 --- a/storage/flux/table.go +++ b/storage/flux/table.go @@ -71,7 +71,7 @@ func (t *table) isCancelled() bool { } func (t *table) init(advance func() bool) { - t.empty = !advance() + t.empty = !advance() && t.err == nil } func (t *table) do(f func(flux.ColReader) error, advance func() bool) error { @@ -82,6 +82,12 @@ func (t *table) do(f func(flux.ColReader) error, advance func() bool) error { } defer t.closeDone() + // If an error occurred during initialization, that is + // returned here. + if t.err != nil { + return t.err + } + if !t.Empty() { t.err = f(t.colBufs) t.colBufs.Release() diff --git a/storage/flux/table_test.go b/storage/flux/table_test.go index dc2423a617c..59a1776b4e3 100644 --- a/storage/flux/table_test.go +++ b/storage/flux/table_test.go @@ -2564,6 +2564,115 @@ func TestStorageReader_EmptyTableNoEmptyWindows(t *testing.T) { } } +func TestStorageReader_ReadGroup(t *testing.T) { + reader := NewStorageReader(t, func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) { + spec := Spec(org, bucket, + MeasurementSpec("m0", + FloatArrayValuesSequence("f0", 10*time.Second, []float64{1.0, 2.0, 3.0, 4.0}), + TagValuesSequence("t0", "a-%s", 0, 3), + ), + ) + tr := TimeRange("2019-11-25T00:00:00Z", "2019-11-25T00:02:00Z") + return gen.NewSeriesGeneratorFromSpec(spec, tr), tr + }) + defer reader.Close() + + for _, tt := range []struct { + aggregate string + want flux.TableIterator + }{ + { + aggregate: storageflux.CountKind, + want: static.TableGroup{ + static.StringKey("_measurement", "m0"), + static.StringKey("_field", "f0"), + static.TimeKey("_start", "2019-11-25T00:00:00Z"), + static.TimeKey("_stop", "2019-11-25T00:02:00Z"), + static.TableMatrix{ + static.StringKeys("t0", "a-0", "a-1", "a-2"), + { + static.Table{ + static.Ints("_value", 12), + }, + }, + }, + }, + }, + { + aggregate: storageflux.SumKind, + want: static.TableGroup{ + static.StringKey("_measurement", "m0"), + static.StringKey("_field", "f0"), + static.TimeKey("_start", "2019-11-25T00:00:00Z"), + static.TimeKey("_stop", "2019-11-25T00:02:00Z"), + static.TableMatrix{ + static.StringKeys("t0", "a-0", "a-1", "a-2"), + { + static.Table{ + static.Floats("_value", 30), + }, + }, + }, + }, + }, + { + aggregate: storageflux.MinKind, + want: static.TableGroup{ + static.StringKey("_measurement", "m0"), + 
static.StringKey("_field", "f0"), + static.TimeKey("_start", "2019-11-25T00:00:00Z"), + static.TimeKey("_stop", "2019-11-25T00:02:00Z"), + static.TableMatrix{ + static.StringKeys("t0", "a-0", "a-1", "a-2"), + { + static.Table{ + static.Times("_time", "2019-11-25T00:00:00Z"), + static.Floats("_value", 1), + }, + }, + }, + }, + }, + { + aggregate: storageflux.MaxKind, + want: static.TableGroup{ + static.StringKey("_measurement", "m0"), + static.StringKey("_field", "f0"), + static.TimeKey("_start", "2019-11-25T00:00:00Z"), + static.TimeKey("_stop", "2019-11-25T00:02:00Z"), + static.TableMatrix{ + static.StringKeys("t0", "a-0", "a-1", "a-2"), + { + static.Table{ + static.Times("_time", "2019-11-25T00:00:30Z"), + static.Floats("_value", 4), + }, + }, + }, + }, + }, + } { + mem := &memory.Allocator{} + got, err := reader.ReadGroup(context.Background(), query.ReadGroupSpec{ + ReadFilterSpec: query.ReadFilterSpec{ + OrganizationID: reader.Org, + BucketID: reader.Bucket, + Bounds: reader.Bounds, + }, + GroupMode: query.GroupModeBy, + GroupKeys: []string{"_measurement", "_field", "t0"}, + AggregateMethod: tt.aggregate, + }, mem) + if err != nil { + t.Fatal(err) + } + + if diff := table.Diff(tt.want, got); diff != "" { + t.Errorf("unexpected results -want/+got:\n%s", diff) + } + } +} + func BenchmarkReadFilter(b *testing.B) { setupFn := func(org, bucket influxdb.ID) (gen.SeriesGenerator, gen.TimeRange) { tagsSpec := &gen.TagsSpec{ From 2397f7f5a0d32cc89afe0159a9a1b49826a70fab Mon Sep 17 00:00:00 2001 From: Ariel Salem Date: Wed, 5 Aug 2020 08:14:34 -0700 Subject: [PATCH 08/13] fix(dashboard-timezone-selection): toggling timezone should update queries timerange to respect timezone selection (#19146) --- CHANGELOG.md | 1 + ui/src/dashboards/selectors/index.test.ts | 102 +++++++++++++++++-- ui/src/dashboards/selectors/index.ts | 29 +++--- ui/src/dashboards/utils/getTimezoneOffset.ts | 13 +++ ui/src/shared/components/RefreshingView.tsx | 4 +- ui/src/shared/components/TimeSeries.tsx | 4 +- ui/src/timeMachine/components/Vis.tsx | 4 +- 7 files changed, 125 insertions(+), 32 deletions(-) create mode 100644 ui/src/dashboards/utils/getTimezoneOffset.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index 83a2fc9a303..2289fd18331 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ 1. [19043](https://github.com/influxdata/influxdb/pull/19043): Enforce all influx CLI flag args are valid 1. [19188](https://github.com/influxdata/influxdb/pull/19188): Dashboard cells correctly map results when multiple queries exist +1. 
[19146](https://github.com/influxdata/influxdb/pull/19146): Dashboard cells and overlay use UTC as query time when toggling to UTC timezone ## v2.0.0-beta.15 [2020-07-23] diff --git a/ui/src/dashboards/selectors/index.test.ts b/ui/src/dashboards/selectors/index.test.ts index ad4fb07d78a..c89551942f8 100644 --- a/ui/src/dashboards/selectors/index.test.ts +++ b/ui/src/dashboards/selectors/index.test.ts @@ -1,9 +1,12 @@ // Funcs +import {mocked} from 'ts-jest/utils' import { getTimeRange, getTimeRangeWithTimezone, } from 'src/dashboards/selectors/index' -import moment from 'moment' +import {getTimezoneOffset} from 'src/dashboards/utils/getTimezoneOffset' + +jest.mock('src/dashboards/utils/getTimezoneOffset') // Types import {RangeState} from 'src/dashboards/reducers/ranges' @@ -30,22 +33,37 @@ const untypedGetTimeRangeWithTimeZone = getTimeRangeWithTimezone as (a: { }) => TimeRange describe('Dashboards.Selector', () => { + beforeEach(() => { + jest.clearAllMocks() + }) const dashboardIDs = [ '04c6f3976f4b8001', '04c6f3976f4b8000', '04c6f3976f4b8002', + '04c6f3976f4b8003', + '04c6f3976f4b8004', ] - const lower = `2020-05-05T10:00:00${moment().format('Z')}` - const upper = `2020-05-05T11:00:00${moment().format('Z')}` - const customTimeRange = { - lower, - upper, + const customTimeRangePST = { + lower: '2020-05-05T10:00:00-07:00', + upper: '2020-05-05T11:00:00-07:00', + type: 'custom', + } as CustomTimeRange + const customTimeRangeCET = { + lower: '2020-05-05T10:00:00+02:00', + upper: '2020-05-05T11:00:00+02:00', + type: 'custom', + } as CustomTimeRange + const customTimeRangeGMT = { + lower: '2020-05-05T10:00:00+00:00', + upper: '2020-05-05T11:00:00+00:00', type: 'custom', } as CustomTimeRange const ranges: RangeState = { [dashboardIDs[0]]: pastFifteenMinTimeRange, [dashboardIDs[1]]: pastHourTimeRange, - [dashboardIDs[2]]: customTimeRange, + [dashboardIDs[2]]: customTimeRangePST, + [dashboardIDs[3]]: customTimeRangeCET, + [dashboardIDs[4]]: customTimeRangeGMT, } it('should return the correct range when a matching dashboard ID is found', () => { @@ -72,7 +90,7 @@ describe('Dashboards.Selector', () => { ).toEqual(DEFAULT_TIME_RANGE) }) - it('should return the an unmodified version of the timeRange when the timeZone is local', () => { + it('should return an unmodified version of the timeRange when the timeZone is local', () => { const currentDashboard = {id: dashboardIDs[2]} const app: AppPresentationState = { ephemeral: { @@ -91,10 +109,10 @@ describe('Dashboards.Selector', () => { expect( untypedGetTimeRangeWithTimeZone({ranges, currentDashboard, app}) - ).toEqual(customTimeRange) + ).toEqual(customTimeRangePST) }) - it('should return the timeRange for the same hour with a UTC timezone when the timeZone is UTC', () => { + it('should return the timeRange for the same hour with a UTC timezone when the timeZone is UTC and the locale is 7 timezones behind UTC', () => { const currentDashboard = {id: dashboardIDs[2]} const app: AppPresentationState = { @@ -117,6 +135,70 @@ describe('Dashboards.Selector', () => { upper: `2020-05-05T11:00:00Z`, type: 'custom', } + // Offset for PST + mocked(getTimezoneOffset).mockImplementation(() => 420) + + expect( + untypedGetTimeRangeWithTimeZone({ranges, currentDashboard, app}) + ).toEqual(expected) + }) + + it('should return the timeRange for the same hour with a UTC timezone when the timeZone is UTC and the locale is 2 timezones ahead of UTC', () => { + const currentDashboard = {id: dashboardIDs[3]} + + const app: AppPresentationState = { + ephemeral: { + 
inPresentationMode: false, + hasUpdatedTimeRangeInVEO: false, + }, + persisted: { + autoRefresh: 0, + showTemplateControlBar: false, + navBarState: 'expanded', + notebookMiniMapState: 'expanded', + timeZone: 'UTC' as TimeZone, + theme: 'dark', + }, + } + + const expected = { + lower: `2020-05-05T10:00:00Z`, + upper: `2020-05-05T11:00:00Z`, + type: 'custom', + } + // Offset for CET + mocked(getTimezoneOffset).mockImplementation(() => -120) + + expect( + untypedGetTimeRangeWithTimeZone({ranges, currentDashboard, app}) + ).toEqual(expected) + }) + + it('should return the timeRange when the timezone has no offset', () => { + const currentDashboard = {id: dashboardIDs[4]} + + const app: AppPresentationState = { + ephemeral: { + inPresentationMode: false, + hasUpdatedTimeRangeInVEO: false, + }, + persisted: { + autoRefresh: 0, + showTemplateControlBar: false, + navBarState: 'expanded', + notebookMiniMapState: 'expanded', + timeZone: 'UTC' as TimeZone, + theme: 'dark', + }, + } + + const expected = { + lower: `2020-05-05T10:00:00Z`, + upper: `2020-05-05T11:00:00Z`, + type: 'custom', + } + + mocked(getTimezoneOffset).mockImplementation(() => 0) expect( untypedGetTimeRangeWithTimeZone({ranges, currentDashboard, app}) diff --git a/ui/src/dashboards/selectors/index.ts b/ui/src/dashboards/selectors/index.ts index f30bfc07e13..a275b6c7f3f 100644 --- a/ui/src/dashboards/selectors/index.ts +++ b/ui/src/dashboards/selectors/index.ts @@ -1,5 +1,8 @@ +// Libraries import {get} from 'lodash' import moment from 'moment' + +// Types import { AppState, Check, @@ -9,7 +12,10 @@ import { View, ViewType, } from 'src/types' + +// Utility import {currentContext} from 'src/shared/selectors/currentContext' +import {getTimezoneOffset} from 'src/dashboards/utils/getTimezoneOffset' // Constants import {DEFAULT_TIME_RANGE} from 'src/shared/constants/timeRanges' @@ -53,25 +59,16 @@ export const isCurrentPageDashboard = (state: AppState): boolean => // from the local time to the same time in UTC if UTC is selected from the // timezone dropdown. 
This is feature was original requested here:
 // https://github.com/influxdata/influxdb/issues/17877
+// and finalized across the dashboards & the data explorer here:
+// https://github.com/influxdata/influxdb/pull/19146
 // Example: user selected 10-11:00am and sets the dropdown to UTC
 // Query should run against 10-11:00am UTC rather than querying
 // 10-11:00am local time (offset depending on timezone)
-export const setTimeToUTC = (date: string): string => {
-  const offset = new Date(date).getTimezoneOffset()
-  if (offset > 0) {
-    return moment
-      .utc(date)
-      .subtract(offset, 'minutes')
-      .format()
-  }
-  if (offset < 0) {
-    return moment
-      .utc(date)
-      .add(offset, 'minutes')
-      .format()
-  }
-  return moment.utc(date).format()
-}
+export const setTimeToUTC = (date: string): string =>
+  moment
+    .utc(date)
+    .subtract(getTimezoneOffset(), 'minutes')
+    .format()

 export const getTimeZone = (state: AppState): TimeZone => {
   return state.app.persisted.timeZone || 'Local'
 }
diff --git a/ui/src/dashboards/utils/getTimezoneOffset.ts b/ui/src/dashboards/utils/getTimezoneOffset.ts
new file mode 100644
index 00000000000..b9e32e726d1
--- /dev/null
+++ b/ui/src/dashboards/utils/getTimezoneOffset.ts
@@ -0,0 +1,13 @@
+/**
+ * This file has been created as a way to effectively test
+ * the getTimeRangeWithTimezone function since current system (CircleCI, Jenkins)
+ * and JS Date limitations prevent us from fully testing out its dependent functions
+ *
+ * It should be noted that the native getTimezoneOffset function returns a number
+ * that represents the number of minutes (not hours) the "local" timezone is offset
+ * where locations West of UTC are positive (+420) and locations East of UTC are negative (-120):
+ *
+ * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/getTimezoneOffset
+ **/
+
+export const getTimezoneOffset = (): number => new Date().getTimezoneOffset()
diff --git a/ui/src/shared/components/RefreshingView.tsx b/ui/src/shared/components/RefreshingView.tsx
index acbb12b756f..a69f83f3c9b 100644
--- a/ui/src/shared/components/RefreshingView.tsx
+++ b/ui/src/shared/components/RefreshingView.tsx
@@ -12,7 +12,7 @@ import CellEvent from 'src/perf/components/CellEvent'

 // Utils
 import {GlobalAutoRefresher} from 'src/utils/AutoRefresher'
-import {getTimeRange} from 'src/dashboards/selectors'
+import {getTimeRangeWithTimezone} from 'src/dashboards/selectors'
 import {checkResultsLength} from 'src/shared/utils/vis'
 import {getActiveTimeRange} from 'src/timeMachine/selectors/index'

@@ -147,7 +147,7 @@ class RefreshingView extends PureComponent {
 }

 const mstp = (state: AppState, ownProps: OwnProps) => {
-  const timeRange = getTimeRange(state)
+  const timeRange = getTimeRangeWithTimezone(state)
   const ranges = getActiveTimeRange(timeRange, ownProps.properties.queries)
   const {timeZone, theme} = state.app.persisted

diff --git a/ui/src/shared/components/TimeSeries.tsx b/ui/src/shared/components/TimeSeries.tsx
index eb001534bbc..a0583f34f3c 100644
--- a/ui/src/shared/components/TimeSeries.tsx
+++ b/ui/src/shared/components/TimeSeries.tsx
@@ -17,7 +17,7 @@ import {getCachedResultsThunk} from 'src/shared/apis/queryCache'

 // Utils
 import {
-  getTimeRange,
+  getTimeRangeWithTimezone,
   isCurrentPageDashboard as isCurrentPageDashboardSelector,
 } from 'src/dashboards/selectors'
 import {getVariables, asAssignment} from 'src/variables/selectors'
@@ -369,7 +369,7 @@ class TimeSeries extends Component {
 }

 const mstp = (state: AppState, props: OwnProps) => {
-  const timeRange =
getTimeRange(state) + const timeRange = getTimeRangeWithTimezone(state) // NOTE: cannot use getAllVariables here because the TimeSeries // component appends it automatically. That should be fixed diff --git a/ui/src/timeMachine/components/Vis.tsx b/ui/src/timeMachine/components/Vis.tsx index 4f76a126093..11dcbbba326 100644 --- a/ui/src/timeMachine/components/Vis.tsx +++ b/ui/src/timeMachine/components/Vis.tsx @@ -21,7 +21,7 @@ import { getFillColumnsSelection, getSymbolColumnsSelection, } from 'src/timeMachine/selectors' -import {getTimeRange, getTimeZone} from 'src/dashboards/selectors' +import {getTimeRangeWithTimezone, getTimeZone} from 'src/dashboards/selectors' // Types import {RemoteDataState, AppState} from 'src/types' @@ -126,7 +126,7 @@ const mstp = (state: AppState) => { statuses, }, } = activeTimeMachine - const timeRange = getTimeRange(state) + const timeRange = getTimeRangeWithTimezone(state) const { alertBuilder: {type: checkType, thresholds: checkThresholds}, } = state From 08cfb3f773aa021d2a92036ba1eaba19eb4a3719 Mon Sep 17 00:00:00 2001 From: Adrian Thurston Date: Wed, 5 Aug 2020 18:14:53 +0300 Subject: [PATCH 09/13] feat: allow a metadata key to be required for a query to be logged (#19200) Can specify that a key must be present in the query response metadata before LoggingProxyQueryService logs the query. Will use this in gateway to only log the query when the connection to queryd fails. --- query/logging.go | 18 ++++++++++++++++++ query/logging_test.go | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+) diff --git a/query/logging.go b/query/logging.go index 587220261ab..7c803d514e4 100644 --- a/query/logging.go +++ b/query/logging.go @@ -22,6 +22,10 @@ type LoggingProxyQueryService struct { nowFunction func() time.Time log *zap.Logger cond func(ctx context.Context) bool + + // If this is set then logging happens only if this key is present in the + // metadata. + requireMetadataKey string } // LoggingProxyQueryServiceOption provides a way to modify the @@ -37,6 +41,12 @@ func ConditionalLogging(cond func(context.Context) bool) LoggingProxyQueryServic } } +func RequireMetadataKey(metadataKey string) LoggingProxyQueryServiceOption { + return func(lpqs *LoggingProxyQueryService) { + lpqs.requireMetadataKey = metadataKey + } +} + func NewLoggingProxyQueryService(log *zap.Logger, queryLogger Logger, proxyQueryService ProxyQueryService, opts ...LoggingProxyQueryServiceOption) *LoggingProxyQueryService { lpqs := &LoggingProxyQueryService{ proxyQueryService: proxyQueryService, @@ -76,6 +86,14 @@ func (s *LoggingProxyQueryService) Query(ctx context.Context, w io.Writer, req * entry.Write(zap.Error(err)) } } + + // Enforce requireMetadataKey, if set. 
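+	// If the configured key is not present in the query's statistics
+	// metadata, skip writing the log entry entirely.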
+ if s.requireMetadataKey != "" { + if _, ok := stats.Metadata[s.requireMetadataKey]; !ok { + return + } + } + traceID, sampled, _ := tracing.InfoFromContext(ctx) log := Log{ OrganizationID: req.Request.OrganizationID, diff --git a/query/logging_test.go b/query/logging_test.go index 71f38604d39..823660f2ff3 100644 --- a/query/logging_test.go +++ b/query/logging_test.go @@ -11,6 +11,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "github.com/influxdata/flux" + "github.com/influxdata/flux/metadata" platform "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/query" "github.com/influxdata/influxdb/v2/query/mock" @@ -58,7 +59,9 @@ func TestLoggingProxyQueryService(t *testing.T) { ExecuteDuration: time.Second, Concurrency: 2, MaxAllocated: 2048, + Metadata: make(metadata.Metadata), } + wantStats.Metadata.Add("some-mock-metadata", 42) wantBytes := 10 pqs := &mock.ProxyQueryService{ QueryF: func(ctx context.Context, w io.Writer, req *query.ProxyRequest) (flux.Statistics, error) { @@ -146,4 +149,34 @@ func TestLoggingProxyQueryService(t *testing.T) { t.Fatal("expected query service to log") } }) + + t.Run("require metadata key", func(t *testing.T) { + defer func() { + logs = nil + }() + + reqMeta1 := query.RequireMetadataKey("this-metadata-wont-be-found") + lpqs1 := query.NewLoggingProxyQueryService(zap.NewNop(), logger, pqs, reqMeta1) + + _, err := lpqs1.Query(context.Background(), ioutil.Discard, req) + if err != nil { + t.Fatal(err) + } + + if len(logs) != 0 { + t.Fatal("expected query service not to log") + } + + reqMeta2 := query.RequireMetadataKey("some-mock-metadata") + lpqs2 := query.NewLoggingProxyQueryService(zap.NewNop(), logger, pqs, reqMeta2) + + _, err = lpqs2.Query(context.Background(), ioutil.Discard, req) + if err != nil { + t.Fatal(err) + } + + if len(logs) != 1 { + t.Fatal("expected query service to log") + } + }) } From 79f6196829b1ae9303e6bae31f02a8352de7ab75 Mon Sep 17 00:00:00 2001 From: karel-rehor Date: Wed, 5 Aug 2020 17:22:09 +0200 Subject: [PATCH 10/13] chore(e2e): update selectors and add containerization (#19221) * update: patch in tests from bonitoo-io/selenium-accept-infl2 * fix: add dataExplorer.feature - forgotten yesterday * update: refactor e2e test configuration * update: set actual cloud credentials in config * fix: issue with headless switch in config * update: WIP cloud experim and refactoring to use client libs * update: sync variablesTab with upstream changes * fix: clumsy use of resultObserver without promise * fix: troubleshoot merge issue * wip: Refactor using client APIs. 
* wip: refactoring tests with new client API * update: tokens tests * wip: fixing variables tests * fix: selectors for settings variables tests * update: update selectors for dashboards and cellEdit * e2e(fix): update selectors for generic popup * e2e(fix): fix note cell test * e2e(circleci): try and force nightly build * e2e(circleci): try and force nightly build bis * e2e(circleci): troubleshoot build issues * e2e(circleci): fix type * e2e(circleci): check circleci build * e2e(circleci): troubleshoot build failure * e2e(circleci): troubleshout circleci build * e2e(circleci): troubleshoot circleci build issues * e2e(circleci): troubleshoot build schedule * e2e(update): wip updating alerts selectors * e2e(circleci): troubleshoot reporting from tests * e2e(update): fix alert selectors - WIP * e2e(circleci): troubleshoot reporting issues in circleci * e2e(update): monitoring selector fixes * e2e(circleci): troubleshoot reporting issues * e2e(update): fix monitoring selectors * e2e(circleci): change cron trigger schedule for bonitoo * e2e(config): add ENV to declare username * test: add semantic commit * e2e: remove debug lines from old config.yml * fix(temp): remove bonitoo workflow from config.yml * fix(e2e): reset cron for hourly-e2e to original values * dataExplorer.feature new scenarios * fix(e2e): generalize urls in telegraf asserts * fix(e2e): troubleshoot circleci build * fix: update .circleci/config.yml nightly acceptance test and sync with upstream * fix(e2e): troubleshoot network issues on jenkins * fix(e2): troubleshoot jenkins failures * fix(e2e): troubleshoot jenkins issues * fix(e2e): troubleshoot jenkins failures * fix(e2e): troubleshoot test issues - change page load strategy * fix(e2e): troubleshoot build issues * fix(e2e): troubleshoot build issues * fix(e2e): add paranoia checks on using UI signin * fix(e2e): troubleshoot test issues * fix(e2e): change handling of env params * fix(e2e): troubleshoot signin stale elem issue * fix(e2e): typo * fix(e2e): synch selectors with recent UI changes * fix(e2e): update check for stale element on signin * fix(e2e): fine tune retry on stale element signin * fix(e2e): lengthen signin retry * fix(e2): try second stale element workaround * fix(e2): attempt to workaround stale element * fix(e2): try cruder solution to stale element on sign in * fix(e2): lengthen timeout for signing page load * fix(e2e): clear notifications before save in dataexplorer * fixes(e2e): update variables button selector * fix(e2e): cleanup data explorer test variable - from browser * fixes(e2e): stash experim changes to cucumber.js * fixes(e2e): troubleshoot deadman check failure * fix(e2e): stash cucumber.js * fix(e2e): update selectors for telegraf delete * fixes(e2e): extend timeout for input to monaco * fixes(e2e): troubleshoot timeout issue on paste into monaco * fixes(e2e): safety commit containerize * fixes(e2e): generic page header no longer applies - temp fix * fixes(e2e): changes to headers temp fixes * chore(e2e): stash acceptance test containerize work * chore(e2e): clean debug messages * feat(e2e): commandline args in containerized test script * fixes(e2e): add junit report generation to container script * fixes(e2e): set delay for selenoid startup * fixes(e2e): stop docker containers after tests * fix(e2e): drop bonitoo argv args before cucumber calls ArgvParser * chore(e2e): update README and remove experim naming * fix(e2e): troubleshoot occasional flake in notification assertions * feat(e2e): add simple performance utility for cloud * feat(e2e): 
tweak performance util
* feat(e2e): start cloud testing with containerized tests
* fix(e2e): halt script on unset required params
* fix(e2e): remove messages that could leak info
* fix(e2e): troubleshoot jenkins credentials
* fix(e2e): troubleshoot parameter issues in jenkins
* fix(e2e): cleanup troubleshoot code
* fix(e2e): update create variables popup selector
* fix(e2e): update submit button for template popup
* fix(e2e): update selectors for data tokens tests
* fix(e2e): sync alerts tests with recent changes
* chore(e2e): add cloud specific Dockerfile
* chore(e2e): add reporting to cloud dockerfile

Co-authored-by: Cubone21
---
 e2e/Makefile                                  |   8 +
 e2e/README.md                                 | 160 +++++++++++++++++-
 .../features/signin/01_signinPerf.feature     |  24 +++
 .../features/signin/02_signinCloud.feature    |  20 +++
 e2e/cucumber.js                               |   9 +-
 e2e/e2e.conf.json                             |   6 +-
 e2e/etc/selenoid/config/browsers.json         |  48 ++++++
 e2e/features/dashboards/cellEdit.feature      |   2 +-
 e2e/features/dashboards/dashboards.feature    |   2 +-
 e2e/features/settings/templates.feature       |   6 +-
 e2e/hooks.js                                  |  11 +-
 e2e/scripts/Dockerfile.cloud                  |  15 ++
 e2e/scripts/Dockerfile.tests                  |  14 ++
 e2e/scripts/cloud-test.sh                     |   6 +
 e2e/scripts/containerTests.sh                 | 140 +++++++++++++++
 e2e/scripts/selenoid.sh                       |  25 +++
 e2e/src/pages/basePage.js                     |   2 +-
 e2e/src/pages/cloud/cloudLoginPage.js         |  25 ++-
 e2e/src/pages/home/homePage.js                |   2 +-
 e2e/src/pages/loadData/tokensTab.js           |  32 ++--
 e2e/src/pages/monitoring/alertsPage.js        |   6 +-
 e2e/src/pages/settings/templatesTab.js        |   5 +
 e2e/src/pages/settings/variablesTab.js        |   2 +-
 .../step_definitions/common/cloudStepDefs.js  |  26 +++
 .../step_definitions/common/commonStepDefs.js |   4 +-
 .../step_definitions/influx/influxStepDefs.js |   4 +
 .../settings/templatesStepDefs.js             |   4 +
 e2e/src/steps/baseSteps.js                    |  24 ++-
 e2e/src/steps/cloudSteps.js                   |  60 +++++++
 e2e/src/steps/dashboards/cellOverlaySteps.js  |   3 +
 e2e/src/steps/influx/influxSteps.js           |   4 +
 e2e/src/steps/loadData/telegrafsSteps.js      |   4 +-
 e2e/src/steps/loadData/tokensSteps.js         |   2 +-
 e2e/src/steps/monitoring/checkEditSteps.js    |  10 +-
 e2e/src/steps/settings/templatesSteps.js      |  10 +-
 e2e/src/utils/influxUtils.js                  | 132 +++++++++++----
 e2e/src/utils/performanceUtils.js             | 149 ++++++++++++++++
 37 files changed, 924 insertions(+), 82 deletions(-)
 create mode 100644 e2e/cloud/features/signin/01_signinPerf.feature
 create mode 100644 e2e/cloud/features/signin/02_signinCloud.feature
 create mode 100644 e2e/etc/selenoid/config/browsers.json
 create mode 100644 e2e/scripts/Dockerfile.cloud
 create mode 100644 e2e/scripts/Dockerfile.tests
 create mode 100755 e2e/scripts/cloud-test.sh
 create mode 100755 e2e/scripts/containerTests.sh
 create mode 100755 e2e/scripts/selenoid.sh
 create mode 100644 e2e/src/utils/performanceUtils.js

diff --git a/e2e/Makefile b/e2e/Makefile
index 4a49641eff6..7ea930cd519 100644
--- a/e2e/Makefile
+++ b/e2e/Makefile
@@ -25,3 +25,11 @@ test: docker-test docker-report
 clean:
 	docker rm -f test-influxdb
 	rm -rf /tmp/report
+
+bonitoo-prep:
+	npm run influx:setup
+
+bonitoo-docker:
+	./scripts/containerTests.sh
+
+bonitoo-test: bonitoo-prep bonitoo-docker
diff --git a/e2e/README.md b/e2e/README.md
index 1591199883c..3f80a090584 100644
--- a/e2e/README.md
+++ b/e2e/README.md
@@ -1,9 +1,11 @@
 ## Selenium-Accept

-Selenium Acceptance tests for the Influxdbv2 UI.
+Selenium Acceptance tests for the Influxdbv2 UI. These tests were initially intended to support nightly testing of the open source (OSS) product. They are currently (7.2020) being leveraged to support synthetic testing of the cloud deployments. This leveraging has led to some substantial changes and improvements in configuration.

 **Run cycle**

+*original OSS*
+
 ```bash
 npm install
 npm run influx:setup
 npm test
 node src/utils/htmlReport.js
 node src/utils/junitReport.js
 ```

 Note that the final two reporting steps can be bundled into `package.json` scripts, or can be called as part of the `package.json` *test* script. They are shown here to illustrate how third party components are used to generate reports.
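+
+A sketch of how that bundling might look in `package.json` (the `report:*` script names match those invoked by `scripts/cloud-test.sh` later in this patch; the composition of the `test` script is illustrative only):
+
+```json
+"scripts": {
+  "report:html": "node src/utils/htmlReport.js",
+  "report:junit": "node src/utils/junitReport.js",
+  "test": "cucumber-js && npm run report:html && npm run report:junit"
+}
+```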
+
+To clean the reports and screenshots directories:
+
+```bash
+npm run clean
+```
+
+### Configuration
+
+Tests are configured in the file `e2e.conf.json`. The configuration to be used for a test run is specified
+by the property `active` in this file. This can be overridden on the command line.
+
+**Command Line Arguments**
+
+The following command line arguments are detected at load time.
+
+ * `headless` - boolean. Whether to run the tests headless or not.
+ * `sel_docker` or `selDocker` - boolean. Added for influxdata docker tests. Chooses options needed for running tests in the influxdata docker container.
+ * `active_conf` or `activeConf` - string. Declares the configuration to be activated for the test run. Must match a config declared in `e2e.conf.json`.
+
+For example, to run the dataexplorer feature `headless` against the `nightly` build configuration:
+
+```bash
+npm test headless=true activeConf=nightly -- features/dataExplorer/dataExplorer.feature
+```
+
+Another example, testing the signin feature using _tags_, `headless` and against the `nightly` configuration:
+
+```bash
+npm test -- headless=true activeConf=nightly -t "@feature-signin"
+```
+
+**Environment Variable Overrides**
+
+Configuration properties can be overridden by `E2E` environment variables. The basic pattern for exporting an environment variable to be picked up for test configuration is the token `E2E` followed by an underscore, then the path to the property to be modified as defined by the configuration tree in `e2e.conf.json`. Each node in the tree is declared in uppercase and separated by an underscore.
+
+For example, to declare the `influx_url` property in the `development` configuration, export the environment variable `E2E_DEVELOPMENT_INFLUX_URL`.
+
+e.g.
+
+`export E2E_DEVELOPMENT_INFLUX_URL="http://172.17.0.2:9999"`
+
+This feature was added specifically to define passwords and usernames only via the test environment. However, it can be used with any configuration key value, such as the INFLUX_URL endpoint.
+
+e.g.
+
+```bash
+export E2E_NIGHTLY_DEFAULT_USER_USERNAME=hibou
+export E2E_NIGHTLY_DEFAULT_USER_PASSWORD=HuluHulu
+```
+
+**Note** - if password or token values in the configuration file are declared with the value _"ENV"_, then they must be defined by an environment variable as shown above. Failure to define the values in the environment will lead to 400 bad request and similar errors, for example when creating users at the start of a test feature.
+
+**User Generation**
+
+In the configuration file the key `create_method` defines the method to be used to create the user and associated artifacts. Four values for this key are recognized (a sketch configuration using these keys follows below).
+
+ * `REST` - use the REST endpoint to create the user directly. Recommended.
+ * `CLI` - use the command line client to create the user. Experimental. **_CAUTION_** Deletes the file `${USER_HOME}/.influxdbv2/configs` and regenerates it. Depends on additional special configuration keys:
+    * `influx_path` - path to the `influx` client executable, used to create the user.
+ * `CLI_DOCKER` - use the command line client inside the docker container. Experimental. Depends on additional special configuration keys:
+    * `docker_name` - name of the docker container in which influxdbv2 is running.
+ * `SKIP` - skip user creation. It is assumed the user account will already exist in the deployment under test.
+
+Note that in the Containerized version described below, the `CLI` and `CLI_DOCKER` user generation modes do not currently (7.2020) work.
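+
+A minimal sketch of a configuration entry combining the keys described above (all values here are illustrative only):
+
+```json
+"development": {
+  "influx_url": "http://localhost:9999",
+  "headless": true,
+  "browser": "chrome",
+  "create_method": "REST",
+  "influx_path": "../bin/linux/influx",
+  "docker_name": "influx2_solo",
+  "default_user": { "username": "hibou", "password": "ENV" }
+}
+```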
+
+### Containerized test runs against Selenoid
+
+In June 2020 scripts were prepared to containerize the tests using a standard node version 12 container from docker and Selenoid browser containers. This approach should make it easier to port the test infrastructure into other CI pipelines, as well as to expand browser coverage.
+
+The entire process is defined in the script `./scripts/containerTests.sh`.
+
+This script does the following:
+ 1. Stops and tears down any existing docker container containing these tests.
+ 1. Stops and tears down any running Selenoid containers.
+ 1. Restarts Selenoid via the script `./scripts/selenoid.sh`.
+ 1. Rebuilds the docker image containing these tests. Note this is currently (7.2020) based on the standard docker nodejs v12.16 image and uses `./scripts/Dockerfile.tests`.
+ 1. Executes the tests based on tag definitions passed to the script using the argument `-t` or `--tags` (defaults to "@influx-influx") and using the configuration passed through the optional argument `-c` or `--config` (defaults to "development").
+
+Examples
+
+```bash
+$ scripts/containerTests.sh --tags "not @download-file"
+$ scripts/containerTests.sh --tags "@feature-signin"
+```
+
+**Mapping directories between containers**
+
+Note that this script maps the `etc` directory containing test data and upload files by linking it to the system `/tmp` directory and then mapping that volume into the docker containers. This solution will not work when the Selenoid and test containers are not hosted on the same machine.
+
+**Debugging Selenoid**
+
+Debugging Selenoid tests entails starting the `selenoid-ui` container and then accessing it at `http://localhost:8080/#/`.
+
+```bash
+scripts/selenoid.sh -debug
+```
+
+_Do not_ run the `containerTests.sh` script. Instead, start the test container as it is started in that script.
+
+e.g.
+```bash
+sudo docker run -it -v `pwd`/report:/home/e2e/report -v `pwd`/screenshots:/home/e2e/screenshots \
+   -v /tmp/e2e/etc:/home/e2e/etc -v /tmp/e2e/downloads:/home/e2e/downloads \
+   -e SELENIUM_REMOTE_URL="http://${SELENOID_HOST}:4444/wd/hub" \
+   -e E2E_${ACTIVE_CONF^^}_INFLUX_URL="http://${INFLUX2_HOST}:9999" --detach \
+   --name ${TEST_CONTAINER} e2e-${TEST_CONTAINER}:latest
+```
+
+Then run tests against it.
+
+```bash
+sudo docker exec ${TEST_CONTAINER} npm test -- activeConf=${ACTIVE_CONF} --tags "$TAGS"
+```
+
+Test runs can then be monitored through the Selenoid UI. Open the active session, and log and VNC windows will open.
+
+**Video with Selenoid**
+
+It is also possible to record an MP4 of a debug test run. In the script `selenoid.sh`, uncomment the following line:
+
+```bash
+sudo docker pull selenoid/video-recorder:latest-release
+```
+
+And for now, in `cucumber.js`, uncomment the line:
+
+```javascript
+caps.set('enableVideo', true);
+```
+
+Then rerun the script `selenoid.sh` with the argument `-debug`.
+
+### Lightweight Performance checks
+
+For tests against the cloud, a lightweight performance utility has been added. It currently exports only one method for tests: `execTimed(func, maxDelay, failMsg, successMsg)`. This method will execute the passed function and expect it to resolve within `maxDelay` milliseconds. Failures are thrown to cucumber, and results are stored in a performance log buffer. This log is dumped to the console after all tests have been run. The results are also written to a CSV report file: `./report/performance.csv`.
+
+For example, here is how it is used to check the redirect to the login page.
+
+```javascript
+    async openCloudPage(maxDelay){
+        await perfUtils.execTimed(async () => {
+                await this.driver.get(__config.influx_url);
+                await this.loginPage.waitToLoad(10000);
+            },
+            maxDelay, `Redirect failed to resolve in ${maxDelay} ms`);
+    }
+```

 ### Tips

 Run only the feature under development

 ```bash
 npm test -- features/onboarding/onboarding.feature:4
 ```

-Number is line number where the target scenario starts.
+The number is the line number where the target scenario starts. To run the whole feature it can be omitted.
+
+Run the same scenario headless:
+
+```bash
+npm test headless=true -- features/onboarding/onboarding.feature:4
+```

 ### API Notes
diff --git a/e2e/cloud/features/signin/01_signinPerf.feature b/e2e/cloud/features/signin/01_signinPerf.feature
new file mode 100644
index 00000000000..3935d049219
--- /dev/null
+++ b/e2e/cloud/features/signin/01_signinPerf.feature
@@ -0,0 +1,24 @@
+Feature: Cloud Signin - Perf
+  Check basic loading times of cloud login
+
+  @perf-test
+  @cloud-test
+  Scenario: Perf - redirect to login
+    When open the cloud page in "3000" milliseconds
+
+  @perf-test
+  @cloud-test
+  Scenario: Perf - login to home
+    When I open the cloud login
+    When log in to the cloud in "3000" milliseconds
+
+  @perf-test
+  @cloud-test
+  Scenario: Perf - logout to account
+    # N.B.
on free account first logout should load account info + When I logout to account info in "3000" milliseconds + + @perf-test + @cloud-test + Scenario: Perf - logout to login + When I logout to login page in "3000" milliseconds diff --git a/e2e/cloud/features/signin/02_signinCloud.feature b/e2e/cloud/features/signin/02_signinCloud.feature new file mode 100644 index 00000000000..cd6ff96a31e --- /dev/null +++ b/e2e/cloud/features/signin/02_signinCloud.feature @@ -0,0 +1,20 @@ +Feature: Cloud Signin - Func + Use and abuse the cloud signin page + +@cloud-test +Scenario: login to cloud password + When I open the cloud login + When log in to the cloud + Then the home page is loaded + +@cloud-test +Scenario: logout influx menu + When click nav menu item "User" + When click user nav item "Logout" + # Need second click - 1st should lead to account info + # TODO update when account info page loads - issue #19057 + When click nav menu item "User" + When click user nav item "Logout" + Then the cloud login page is loaded + +#Scenario: excersize login page diff --git a/e2e/cucumber.js b/e2e/cucumber.js index a9d8635cd45..b88e057b681 100644 --- a/e2e/cucumber.js +++ b/e2e/cucumber.js @@ -1,7 +1,7 @@ const chrome = require('selenium-webdriver/chrome'); const ffox = require('selenium-webdriver/firefox'); const fs = require('fs'); -const {Builder, Capabilities, By, Key, logging, PageLoadStrategy, promise, until} = require('selenium-webdriver'); +const {Builder, Capabilities, By, Key, LocalFileDetector, logging, PageLoadStrategy, promise, until} = require('selenium-webdriver'); //following provides cleaner paths in require statements global.__basedir = __dirname; global.__srcdir = __dirname + "/src"; @@ -24,11 +24,15 @@ fs.mkdirSync(__screenShotDir, { recursive: true }); var common = '--require "src/step_definitions/**/*.js" --require hooks.js --require-module babel-core/register '; let caps = new Capabilities(); +caps.set('enableVNC', true); +//caps.set('enableVideo', true); caps.set('pageLoadStrategy', 'normal'); -let chromeUserPreferences = { 'download.prompt_for_download': false, "download.default_directory": __basedir }; +let chromeUserPreferences = { 'download.prompt_for_download': false, "download.default_directory": __config.download_dir }; let windowSize = { "width": 1024, "height": 768 }; +console.log("DEBUG chromeUserPreferences " + JSON.stringify(chromeUserPreferences)); + if(__config.window_size){ windowSize.width = parseInt(__config.window_size.width); windowSize.height = parseInt(__config.window_size.height); @@ -93,6 +97,7 @@ if(__config.headless) { } __wdriver.manage().setTimeouts({implicit: 3000}); +//__wdriver.setFileDetector(LocalFileDetector); __wdriver.executor_.w3c = true; console.log("DEBUG __wdriver: " + JSON.stringify(__wdriver)); diff --git a/e2e/e2e.conf.json b/e2e/e2e.conf.json index d3a3c75a2ad..64be3dffb16 100644 --- a/e2e/e2e.conf.json +++ b/e2e/e2e.conf.json @@ -31,12 +31,16 @@ "influx_url": "http://localhost:9999", "def_ctx": "/", "headless": true, + "sel_docker": false, + "deployment": "nightly_docker", "browser": "chrome", "screenshot_dir": "screenshots", - "deployment": "local_build", "create_method": "CLI_DOCKER", "influx_path": "../bin/linux/influx", "docker_name": "influx2_solo", + "influxdb": { + "version" : "2.0.0" + }, "default_user": { "username": "hibou", "password": "ENV", diff --git a/e2e/etc/selenoid/config/browsers.json b/e2e/etc/selenoid/config/browsers.json new file mode 100644 index 00000000000..bb94aa5c555 --- /dev/null +++ 
b/e2e/etc/selenoid/config/browsers.json @@ -0,0 +1,48 @@ +{ + "chrome": { + "default": "83.0", + "versions": { + "81.0": { + "image": "selenoid/chrome:81.0", + "port": "4444", + "path": "/" + }, + "83.0": { + "image": "selenoid/vnc:chrome_83.0", + "volumes": [ "/tmp/e2e/etc:/home/e2e/etc", "/tmp/e2e/downloads:/home/selenium/Downloads" ], + "port": "4444", + "path": "/" + } + } + }, + "firefox": { + "default": "77.0", + "versions": { + "76.0": { + "image": "selenoid/firefox:76.0", + "port": "4444", + "path": "/wd/hub" + }, + "77.0": { + "image": "selenoid/firefox:77.0", + "port": "4444", + "path": "/wd/hub" + } + } + }, + "opera": { + "default": "68.0", + "versions": { + "67.0": { + "image": "selenoid/opera:67.0", + "port": "4444", + "path": "/" + }, + "68.0": { + "image": "selenoid/opera:68.0", + "port": "4444", + "path": "/" + } + } + } +} diff --git a/e2e/features/dashboards/cellEdit.feature b/e2e/features/dashboards/cellEdit.feature index 1a2f134b538..1a274309027 100644 --- a/e2e/features/dashboards/cellEdit.feature +++ b/e2e/features/dashboards/cellEdit.feature @@ -621,7 +621,7 @@ Feature: Dashboards - Dashboard - Cell Edit Then the cell named "Kliky" contains a graph -@error-collateral +@error-collateral @download-file Scenario: Download results as CSV When remove files ".*chronograf_data.csv" if exists When toggle context menu of dashboard cell named "Kliky" diff --git a/e2e/features/dashboards/dashboards.feature b/e2e/features/dashboards/dashboards.feature index 1e1478fff13..e47356928cd 100644 --- a/e2e/features/dashboards/dashboards.feature +++ b/e2e/features/dashboards/dashboards.feature @@ -256,7 +256,7 @@ Feature: Dashboards - Base When click the Export Dashboard dismiss button Then popup is not loaded -@error-collateral +@error-collateral @download-file Scenario: Export Dashboard to file When remove file "tau_ceti.json" if exists When hover over dashboard card named "Tau Ceti" diff --git a/e2e/features/settings/templates.feature b/e2e/features/settings/templates.feature index 1c0efefb8c0..4e04e4669b2 100644 --- a/e2e/features/settings/templates.feature +++ b/e2e/features/settings/templates.feature @@ -59,7 +59,7 @@ Feature: Settings - Templates When click user templates When click header import template button When upload the template file "" - When click popup submit button + When click import template popup submit button Then popup is not loaded Then the success notification contains "Successfully imported template." When close all notifications @@ -81,7 +81,7 @@ Feature: Settings - Templates When click header import template button Then click the import template paste button When paste contents of "" to template textarea - When click popup submit button + When click import template popup submit button Then popup is not loaded Then the success notification contains "Successfully imported template." 
When close all notifications @@ -103,7 +103,7 @@ Feature: Settings - Templates When click user templates When click header import template button When upload the template file "etc/test-data/bad-template.json" - When click popup submit button + When click import template popup submit button Then popup is not loaded Then the error notification contains "Failed to import template: Error: Request failed with status code 400" When close all notifications diff --git a/e2e/hooks.js b/e2e/hooks.js index d6b0a4989f3..2245e848dd9 100644 --- a/e2e/hooks.js +++ b/e2e/hooks.js @@ -1,6 +1,7 @@ const fs = require('fs') -var {Before, BeforeAll, After, AfterAll, Status} = require('cucumber') +var {Before, After, AfterAll, Status} = require('cucumber'); var {logging} = require('selenium-webdriver'); +var perfUtils = require(__srcdir + '/utils/performanceUtils.js'); /* Before(function (scenario, callback) { @@ -22,7 +23,6 @@ BeforeAll(async function (scenario, callback) { callback(); })*/ - async function writeScreenShot(filename) { filename = filename.replace(/\s+/g, '_'); return await __wdriver.takeScreenshot().then(async (image, err) => { @@ -64,6 +64,8 @@ let currentFeature = ''; Before(async function (scenario){ + __currentFeature = scenario.sourceLocation.uri.substring(scenario.sourceLocation.uri.lastIndexOf("/") + 1).replace('.','-') + __currentScenario = scenario.pickle.name.trim(); //safety kill any live data generator if(!await scenarioContainsTag(scenario, '@use-live-data') && __liveDataGenRunning){ console.log("killing live generator"); @@ -142,6 +144,11 @@ AfterAll(async function ( ) { console.log("killing live generator"); __killLiveDataGen = true; } + + if(perfUtils.performanceLog.length > 0){ + await perfUtils.writePerformanceLog(); + await perfUtils.writePerfomanceReport(); + } }); diff --git a/e2e/scripts/Dockerfile.cloud b/e2e/scripts/Dockerfile.cloud new file mode 100644 index 00000000000..76f9d16926f --- /dev/null +++ b/e2e/scripts/Dockerfile.cloud @@ -0,0 +1,15 @@ +FROM node:12.16 + +USER root + +COPY package.json /home/e2e/package.json + +WORKDIR /home/e2e + +RUN npm install + +COPY . /home/e2e + +ENV PATH $PATH:./node_modules/.bin + +ENTRYPOINT scripts/cloud-test.sh diff --git a/e2e/scripts/Dockerfile.tests b/e2e/scripts/Dockerfile.tests new file mode 100644 index 00000000000..3d11a266d04 --- /dev/null +++ b/e2e/scripts/Dockerfile.tests @@ -0,0 +1,14 @@ +FROM node:12.16 + +USER root + +COPY package.json /home/e2e/package.json + +WORKDIR /home/e2e + +RUN npm install + +COPY . 
/home/e2e + +ENV PATH $PATH:./node_modules/.bin + diff --git a/e2e/scripts/cloud-test.sh b/e2e/scripts/cloud-test.sh new file mode 100755 index 00000000000..89f432e9174 --- /dev/null +++ b/e2e/scripts/cloud-test.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +npm test -- activeConf=cloud cloud +sleep 15 +npm run report:html +npm run report:junit diff --git a/e2e/scripts/containerTests.sh b/e2e/scripts/containerTests.sh new file mode 100755 index 00000000000..b4da33ea384 --- /dev/null +++ b/e2e/scripts/containerTests.sh @@ -0,0 +1,140 @@ +#!/usr/bin/env bash + +APP_ROOT="$(dirname "$(dirname "$(readlink -fm "$0")")")" +TEST_CONTAINER=bonitoo_e2e +INFLUX2_CONTAINER=influx2_solo +E2E_MAP_DIR=/tmp/e2e +INFLUX2_HOST=$(sudo docker inspect -f "{{ .NetworkSettings.IPAddress }}" ${INFLUX2_CONTAINER}) +INFLUX2_URL="http://${INFLUX2_HOST}:9999" +#TAGS="@influx-influx" +ACTIVE_CONF=development + +POSITIONAL=() +while [[ $# -gt 0 ]] +do +key="$1" + +case $key in + -t| --tags) + TAGS="$2" + shift; # past argument + shift; # past val + ;; + -c| --config) + ACTIVE_CONF="$2" + shift; + shift; + ;; + -b| --base) + BASE_DIR="$2" + shift; + shift; +esac +done + +echo E2E_CLOUD_DEFAULT_USER_USERNAME = ${E2E_CLOUD_DEFAULT_USER_USERNAME} + +echo Working from ${APP_ROOT} + +DOCKER_TEST_CMD="npm test -- activeConf=${ACTIVE_CONF}" + +if [[ -n "${TAGS}" ]]; then + DOCKER_TEST_CMD="${DOCKER_TEST_CMD} --tags ${TAGS}" +fi + +if [[ -n "${BASE_DIR+x}" ]]; then + DOCKER_TEST_CMD="${DOCKER_TEST_CMD} ${BASE_DIR}" +fi + +echo DOCKER_TEST_CMD = ${DOCKER_TEST_CMD} + +if [ ${ACTIVE_CONF} = 'cloud' ]; then + echo configuring for cloud + if [ -z ${E2E_CLOUD_INFLUX_URL+x} ]; then + echo + echo E2E_CLOUD_INFLUX_URL is unset + echo But cloud configuration chosen + echo + echo Please set E2E_CLOUD_INFLUX_URL to use cloud configuration + exit 1 + else + echo E2E_CLOUD_INFLUX_URL ${E2E_CLOUD_INFLUX_URL} + INFLUX2_URL=${E2E_CLOUD_INFLUX_URL} + fi + + if [ -z ${E2E_CLOUD_DEFAULT_USER_PASSWORD} ]; then + echo + echo E2E_CLOUD_DEFAULT_USER_PASSWORD is unset + echo But cloud configuration chosen + echo + echo Please set E2E_CLOUD_DEFAULT_USER_PASSWORD to use cloud configuration + exit 1 + fi +fi + +echo "------ Targeting influx at ${INFLUX2_URL} ------" + + +# Tear down running test container +echo "----- Tearing down test container ${TEST_CONTAINER} ------" + +if docker container inspect ${TEST_CONTAINER} > /dev/null 2>&1 +then + + if [ "$( docker container inspect -f '{{.State.Running}}' ${TEST_CONTAINER} )" == "true" ]; then + + echo stopping ${TEST_CONTAINER} + sudo docker stop ${TEST_CONTAINER} + + fi + + echo removing ${TEST_CONTAINER} + sudo docker rm ${TEST_CONTAINER} + +fi + +# Ensure mapped dirs are current +echo "----- Ensuring linked dir for volumes is current ------" +if [ -L ${E2E_MAP_DIR}/etc ]; then + echo ${E2E_MAP_DIR}/etc is linked + echo removing ${E2E_MAP_DIR} + sudo rm -r ${E2E_MAP_DIR} +fi +sudo mkdir -p ${E2E_MAP_DIR} +echo linking ${APP_ROOT}/etc +sudo ln -s ${APP_ROOT}/etc ${E2E_MAP_DIR}/etc + +echo "------ (Re)start Selenoid ------" +source ${APP_ROOT}/scripts/selenoid.sh +echo SELENOID_HOST is ${SELENOID_HOST} + +# Rebuild and start test container +echo "----- Rebuilding test container ${TEST_CONTAINER} ------" +if [[ -d "$APP_ROOT/report" ]]; then + echo cleaning "$APP_ROOT/report" + sudo npm run clean + rm -rdf report +fi + +DOCKER_ENVARS="-e SELENIUM_REMOTE_URL=http://${SELENOID_HOST}:4444/wd/hub -e E2E_${ACTIVE_CONF^^}_INFLUX_URL=${INFLUX2_URL}" + +if [ -n ${E2E_CLOUD_DEFAULT_USER_PASSWORD} ]; then + 
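+    # Forward the cloud password from the host environment into the test container environment.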
DOCKER_ENVARS="${DOCKER_ENVARS} -e E2E_CLOUD_DEFAULT_USER_PASSWORD=${E2E_CLOUD_DEFAULT_USER_PASSWORD}" +fi + +sudo docker build -t e2e-${TEST_CONTAINER} -f scripts/Dockerfile.tests . +sudo docker run -it -v `pwd`/report:/home/e2e/report -v `pwd`/screenshots:/home/e2e/screenshots \ + -v /tmp/e2e/etc:/home/e2e/etc -v /tmp/e2e/downloads:/home/e2e/downloads \ + ${DOCKER_ENVARS} --detach \ + --name ${TEST_CONTAINER} e2e-${TEST_CONTAINER}:latest + +echo ACTIVE_CONF ${ACTIVE_CONF} BASE_DIR ${BASE_DIR} TAGS ${TAGS} + +sudo docker exec ${TEST_CONTAINER} ${DOCKER_TEST_CMD} + +sudo docker exec ${TEST_CONTAINER} npm run report:html +sudo docker exec ${TEST_CONTAINER} npm run report:junit + +sudo docker stop ${TEST_CONTAINER} +sudo docker stop selenoid + diff --git a/e2e/scripts/selenoid.sh b/e2e/scripts/selenoid.sh new file mode 100755 index 00000000000..69c685f71a2 --- /dev/null +++ b/e2e/scripts/selenoid.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +APP_ROOT="$(dirname "$(dirname "$(readlink -fm "$0")")")" + +sudo docker stop selenoid +sudo docker rm selenoid + +sudo docker run -d --name selenoid -p 4444:4444 -v /var/run/docker.sock:/var/run/docker.sock \ + -v ${APP_ROOT}/etc/selenoid/config/:/etc/selenoid/:ro aerokube/selenoid:latest-release \ + -enable-file-upload -video-output-dir ${APP_ROOT}/etc/selenoid/video -timeout 5m0s + +sleep 10 + +export SELENOID_HOST=$(sudo docker inspect -f "{{ .NetworkSettings.IPAddress }}" selenoid) +echo SELENOID_HOST ${SELENOID_HOST} + +sudo docker pull selenoid/vnc:chrome_83.0 +#sudo docker pull selenoid/video-recorder:latest-release + +if [ "$1" = "-debug" ]; then + echo debugging + sudo docker stop selenoid-ui + sudo docker rm selenoid-ui + docker run -d --name selenoid-ui -p 8080:8080 aerokube/selenoid-ui --selenoid-uri http://${SELENOID_HOST}:4444 +fi diff --git a/e2e/src/pages/basePage.js b/e2e/src/pages/basePage.js index 68bca4b287c..41beff7ce12 100644 --- a/e2e/src/pages/basePage.js +++ b/e2e/src/pages/basePage.js @@ -63,7 +63,7 @@ class basePage{ } async waitUntilElementVisibleCss(selector){ - await this.driver.wait(until.elementIsVisible(this.driver.findElement(By.css(selector)))); + await this.driver.wait(until.elementIsVisible(await this.driver.findElement(By.css(selector)))); } diff --git a/e2e/src/pages/cloud/cloudLoginPage.js b/e2e/src/pages/cloud/cloudLoginPage.js index 7cacc045ad3..f82c2892b08 100644 --- a/e2e/src/pages/cloud/cloudLoginPage.js +++ b/e2e/src/pages/cloud/cloudLoginPage.js @@ -1,20 +1,37 @@ const { By, Condition, until, StaleElementReferenceError} = require('selenium-webdriver'); - -const urlCtx = 'signin'; +const basePage = require(__srcdir + '/pages/basePage.js'); const emailInput = '[data-testid=\'input-field\']'; const passwordInput = '[data-testid=\'visibility-input\']'; const logInButton = '[data-testid=\'button\']'; const logInPanel = '[data-testid=\'panel\']'; +const googleLoginButton = '[data-testid=button-base]'; +const signupLink = '//*[text()=\'Sign Up\']'; +const forgotPassLink = '//*[text()=\'Forgot Password\']'; + +const urlCtx = 'login'; + -class cloudLoginPage { +class cloudLoginPage extends basePage { constructor(driver) { - //super(driver); + super(driver); this.urlCtx = urlCtx; this.driver = driver; } + async isLoaded(){ + await super.isLoaded([ + {type: 'css', selector: emailInput}, + {type: 'css', selector: passwordInput}, + {type: 'css', selector: logInButton}, + {type: 'css', selector: logInPanel}, + {type: 'css', selector: googleLoginButton}, + {type: 'xpath', selector: signupLink}, + {type: 'xpath', selector: 
forgotPassLink} + ], urlCtx); + } + async waitToLoad(timeout = 10000){ await this.driver.wait(until.elementLocated(By.css(logInPanel)), timeout, `Login controls failed to load in ${timeout} milliseonds `); diff --git a/e2e/src/pages/home/homePage.js b/e2e/src/pages/home/homePage.js index 2765c8d5f3c..2e84a72d505 100644 --- a/e2e/src/pages/home/homePage.js +++ b/e2e/src/pages/home/homePage.js @@ -1,7 +1,7 @@ const influxPage = require(__srcdir + '/pages/influxPage.js'); const { By } = require('selenium-webdriver'); -const logoutButton = '[data-testid=button][title=Logout]'; +const logoutButton = '[title=Logout]'; const getStartedDataCollect = '//*[@data-testid=\'panel\'][./div[contains(@class, \'getting-started\')]][.//span[text()=\'Load your data\']]'; const getStartedDashboard = '//*[@data-testid=\'panel\'][./div[contains(@class, \'getting-started\')]][.//span[text()=\'Build a dashboard\']]'; const getStartedAlerting = '//*[@data-testid=\'panel\'][./div[contains(@class, \'getting-started\')]][.//span[text()=\'Set up alerting\']]'; diff --git a/e2e/src/pages/loadData/tokensTab.js b/e2e/src/pages/loadData/tokensTab.js index b0195a3e5e6..ed931eaeff5 100644 --- a/e2e/src/pages/loadData/tokensTab.js +++ b/e2e/src/pages/loadData/tokensTab.js @@ -7,19 +7,19 @@ const tokenListing = '[data-testid=resource-list]'; //const descHeader = '[data-testid=index-list--header-cell]:nth-of-type(1)';// header no longer present 10.6 //const statusHeader = '[data-testid=index-list--header-cell]:nth-of-type(2)'; //header no longer present 10.6 //const createVariableBody = '[data-testid=button-create-initial]'; -const tokenCellTemplate = '//*[@data-testid=\'resource-card\'][.//span[text()="%DESCR%"]]'; +const tokenCellTemplate = '[data-testid="token-card %DESCR%"]'; const generateTokenDropdownBtn = '[data-testid=dropdown-button--gen-token]'; const generateTokenItem = '[data-testid=\'dropdown-item generate-token--%ITEM%\']'; -const tokenCardDisableToggle = '//*[@data-testid = \'resource-card\'][.//span[text() = \'%DESCR%\']]//*[@data-testid=\'slide-toggle\']'; +const tokenCardDisableToggle = '[data-testid = \'token-card %DESCR%\'] [data-testid=\'slide-toggle\']'; const tokenSorterButton = '[data-testid=resource-sorter--button]'; const tokenSorterItem = '[data-testid=resource-sorter--%ITEM%]'; const tokensSortByDescription = '//*[@data-testid=\'index-list--header-cell\'][text()=\'Description\']'; -const tokenDescription = '//*[@data-testid=\'resource-editable-name\'][.//span[text()=\'%DESCR%\']]'; -const tokenDescriptionEditBtn = '//*[./*[@data-testid=\'resource-editable-name\'][.//span[text()=\'%DESCR%\']]]//*[@data-testid=\'resource-editable-name--button\']'; +const tokenDescription = '[data-testid=\'token-name %DESCR%\']'; +const tokenDescriptionEditBtn = '[data-testid=\'token-card %DESCR%\'] [data-testid=\'resource-editable-name--button\']'; const tokenDescriptionEditInput = '//*[./*[@data-testid=\'resource-editable-name--input--default\']]/input'; -const tokenCardDeleteButton = '//*[@data-testid=\'resource-card\'][.//span[text()="%DESCR%"]]//*[@data-testid=\'context-menu\']'; +const tokenCardDeleteButton = '[data-testid=\'token-card %DESCR%\'] [data-testid=\'context-menu\']'; // next selector is deprecated - todo clean up -const tokenCardDeleteConfirm = '//*[@data-testid=\'resource-card\'][.//span[text()="%DESCR%"]]//*[@data-testid=\'delete-token\']'; +const tokenCardDeleteConfirm = '[data-testid=\'token-card %DESCR%\'] [data-testid=\'delete-token\']'; const tokenCardPopoverDeletConfirm = 
'//*[@data-testid=\'delete-token--popover--dialog\']//*[text() = \'Confirm\']'; // Generate Read/Write token popup @@ -32,9 +32,9 @@ const selectAllBuckets = '//*[@data-testid=\'flex-box\'][div[text()=\'%MODE%\']] const deselectAllBuckets = '//*[@data-testid=\'flex-box\'][div[text()=\'%MODE%\']]//*[@title=\'Deselect All\']'; //Generate All Access token popup -const allAccessDescrInput = '[data-testid=form-container] [data-testid=input-field]'; +const allAccessDescrInput = '[data-testid=all-access-token-input]'; const allAccessCancelButton = '[data-testid=button][title=Cancel]'; -const allAccessSaveButton = '[data-testid=button][title=Save]'; +const allAccessSaveButton = '[data-testid=button--save]'; //Review token popup const tokenReviewTokenCode = 'div.code-snippet--text pre code'; @@ -62,11 +62,11 @@ class tokensTab extends settingsPage{ } async getTokenCellByDescr(descr){ - return await this.driver.findElement(By.xpath(tokenCellTemplate.replace('%DESCR%', descr))); + return await this.driver.findElement(By.css(tokenCellTemplate.replace('%DESCR%', descr))); } static getTokenCellSelectorByDescr(descr){ - return { type: 'xpath', selector: tokenCellTemplate.replace('%DESCR%', descr)}; + return { type: 'css', selector: tokenCellTemplate.replace('%DESCR%', descr)}; } async getGenerateTokenDropdownBtn(){ @@ -136,11 +136,11 @@ class tokensTab extends settingsPage{ } async getTokenCardDisableToggle(descr){ - return await this.driver.findElement(By.xpath(tokenCardDisableToggle.replace('%DESCR%', descr))); + return await this.driver.findElement(By.css(tokenCardDisableToggle.replace('%DESCR%', descr))); } async getTokenCardDescriptions(){ - return await this.driver.findElements(By.xpath('//*[@data-testid=\'resource-editable-name\']')); + return await this.driver.findElements(By.css('[data-testid^="token-name"]')); } async getTokenSorterButton(){ @@ -157,11 +157,11 @@ class tokensTab extends settingsPage{ } async getTokenDescription(descr){ - return await this.driver.findElement(By.xpath(tokenDescription.replace('%DESCR%', descr))); + return await this.driver.findElement(By.css(tokenDescription.replace('%DESCR%', descr))); } async getTokenDescriptionEditBtn(descr){ - return await this.driver.findElement(By.xpath(tokenDescriptionEditBtn.replace('%DESCR%', descr))); + return await this.driver.findElement(By.css(tokenDescriptionEditBtn.replace('%DESCR%', descr))); } async getTokenDescriptionEditInput(descr){ @@ -181,11 +181,11 @@ class tokensTab extends settingsPage{ } async getTokenCardDeleteButton(descr){ - return await this.driver.findElement(By.xpath(tokenCardDeleteButton.replace('%DESCR%', descr))); + return await this.driver.findElement(By.css(tokenCardDeleteButton.replace('%DESCR%', descr))); } async getTokenCardDeleteConfirm(descr){ - return await this.driver.findElement(By.xpath(tokenCardDeleteConfirm.replace('%DESCR%', descr))); + return await this.driver.findElement(By.css(tokenCardDeleteConfirm.replace('%DESCR%', descr))); } async getTokenCardPopoverDeletConfirm(){ diff --git a/e2e/src/pages/monitoring/alertsPage.js b/e2e/src/pages/monitoring/alertsPage.js index 1e227e12f85..793a81acfb4 100644 --- a/e2e/src/pages/monitoring/alertsPage.js +++ b/e2e/src/pages/monitoring/alertsPage.js @@ -7,16 +7,16 @@ const createEndpointButton = '[data-testid=create-endpoint]'; const createRuleButton = '[data-testid=create-rule]'; const checksFilterInput = '[data-testid=\'filter--input checks\']'; const checksQuestionMark = '[data-testid=\'Checks--question-mark\']'; -const checksTooltipContents = 
'[data-testid=\'Checks--question-mark-tooltip--contents\']'; +const checksTooltipContents = '[data-testid=\'Checks--question-mark--tooltip--contents\']'; const alertingTab = '[data-testid=alerting-tab--%TABNAME%]'; const createCheckDropdown = '[data-testid=\'checks--column\'] [data-testid=\'dropdown-menu--contents\']'; const createCheckDropdownItem = '[data-testid=\'dropdown-menu--contents\'] [data-testid=create-%ITEM%-check]'; const endpointsFilterInput = '[data-testid=\'filter--input endpoints\']'; const endpointsQuestionMark = '[data-testid=\'Notification Endpoints--question-mark\']'; -const endpointsTooltipContents = '[data-testid=\'Notification Endpoints--question-mark-tooltip--contents\']'; +const endpointsTooltipContents = '[data-testid=\'Notification Endpoints--question-mark--tooltip--contents\']'; const rulesFilterInput = '[data-testid=\'filter--input rules\']'; const rulesQuestionMark = '[data-testid=\'Notification Rules--question-mark\']'; -const rulesTooltipContents = '[data-testid=\'Notification Rules--question-mark-tooltip--contents\']'; +const rulesTooltipContents = '[data-testid=\'Notification Rules--question-mark--tooltip--contents\']'; const firstTimeThresholdCheckCreateButton = '[data-testid=\'checks--column\'] [data-testid=panel--body] [data-testid=button][title=\'Threshold Check\']'; const firstTimeDeadmanCheckCreateButton = '[data-testid=\'checks--column\'] [data-testid=panel--body] [data-testid=button][title=\'Deadman Check\']'; const emptyStateColumnText = '[data-testid=\'%COL%--column\'] [data-testid=\'empty-state--text\']'; diff --git a/e2e/src/pages/settings/templatesTab.js b/e2e/src/pages/settings/templatesTab.js index 4fa1da9fed1..116659221d0 100644 --- a/e2e/src/pages/settings/templatesTab.js +++ b/e2e/src/pages/settings/templatesTab.js @@ -22,6 +22,7 @@ const importTemplateUploadButton = '[data-testid=overlay--body] [data-testid=sel const importTemplatePasteButton = '[data-testid=overlay--body] [data-testid=select-group--option][title=Paste]'; const importTemplateJSONTextArea = '[data-testid=overlay--body] [data-testid=import-overlay--textarea]'; const importTemplateDragNDrop = '[data-testid=overlay--body] input[type=file]'; +const importTemplateSubmitButton = '[data-testid=\'submit-button Template\']'; class templatesTab extends settingsPage{ @@ -77,6 +78,10 @@ class templatesTab extends settingsPage{ return await this.driver.findElement(By.css(importTemplateDragNDrop)); } + async getImportTemplateSubmitButton(){ + return await this.driver.findElement(By.css(importTemplateSubmitButton)); + } + async getTemplateCardByName(name){ return await this.driver.findElement(By.xpath(templateCardByName.replace('%NAME%', name))); } diff --git a/e2e/src/pages/settings/variablesTab.js b/e2e/src/pages/settings/variablesTab.js index 66c1132bc6f..d4c5af3adc8 100644 --- a/e2e/src/pages/settings/variablesTab.js +++ b/e2e/src/pages/settings/variablesTab.js @@ -22,7 +22,7 @@ const urlCtx = 'variables'; const uploadRadioButton = '[data-testid=overlay--body] [data-testid=select-group--option][title=Upload]'; const pasteRadioButton = '[data-testid=overlay--body] [data-testid=select-group--option][title=Paste]'; const dragNDropFile = 'input[type=file]'; //N.B. 
has display:none -const importButton = '[data-testid=overlay--footer] [data-testid=button]'; +const importButton = '[data-testid=overlay--footer] [data-testid=\'submit-button Variable\']'; const pasteJSONTextarea = '[data-testid=overlay--body] [data-testid=import-overlay--textarea]'; const importVariableDragNDropHeader = '.drag-and-drop--header'; diff --git a/e2e/src/step_definitions/common/cloudStepDefs.js b/e2e/src/step_definitions/common/cloudStepDefs.js index b80b3ff9890..4360cf1ce90 100644 --- a/e2e/src/step_definitions/common/cloudStepDefs.js +++ b/e2e/src/step_definitions/common/cloudStepDefs.js @@ -3,9 +3,15 @@ const cloudSteps = require(__srcdir + '/steps/cloudSteps.js'); let cSteps = new cloudSteps(__wdriver); +When(/^open the cloud page in "(.*?)" milliseconds$/, {timeout: 15000}, async (maxDelay) => { + await cSteps.openCloudPage(parseInt(maxDelay)); +}); + +/* When(/^setup default cloud user$/, async () => { await cSteps.setupDefaultCloudUser(); }); +*/ When(/^I open the cloud login$/, {timeout: 30000}, async () => { await cSteps.openCloudLogin(); @@ -14,3 +20,23 @@ When(/^I open the cloud login$/, {timeout: 30000}, async () => { When(/^log in to the cloud$/, async () => { await cSteps.logInToCloud(); }); + +When(/^log in to the cloud in "(.*)" milliseconds$/, {timeout: 15000}, async (maxDelay) => { + await cSteps.logInToCloudTimed(parseInt(maxDelay)); +}); + +When(/^I logout to account info in "(.*)" milliseconds$/, {timeout: 15000}, async (maxDelay) => { + await cSteps.logoutToAccountInfoTimed(parseInt(maxDelay)); +}); + +When(/^I logout to login page in "(.*)" milliseconds$/, {timeout: 15000}, async(maxDelay) => { + await cSteps.logoutToLoginTimed(parseInt(maxDelay)); +}); + +When(/^wait "(.*?)" with delay "(.*?)"$/, async (sleep, delay) => { + await cSteps.performanceBogusTest(sleep, parseInt(delay)); +}); + +Then(/^the cloud login page is loaded$/, {timeout: 15000}, async () => { + await cSteps.verifyCloudLoginPageLoaded(); +}); diff --git a/e2e/src/step_definitions/common/commonStepDefs.js b/e2e/src/step_definitions/common/commonStepDefs.js index 5e97d56dccc..c2052e330f9 100644 --- a/e2e/src/step_definitions/common/commonStepDefs.js +++ b/e2e/src/step_definitions/common/commonStepDefs.js @@ -329,11 +329,11 @@ Then(/^the file "(.*)" has been downloaded$/, async filePath => { }); When(/^remove files "(.*)" if exists$/, async regex => { - await influxUtils.removeFilesByRegex(regex); + await influxUtils.removeDownloadFilesByRegex(regex); }); Then(/^a file matching "(.*)" exists$/, async regex => { - await bSteps.verifyFileMatchingRegexExists(regex); + await bSteps.verifyDownloadFileMatchingRegexExists(regex); }); When(/^verify first CSV file matching "(.*)" as containing$/, async (path, dataDesc) => { diff --git a/e2e/src/step_definitions/influx/influxStepDefs.js b/e2e/src/step_definitions/influx/influxStepDefs.js index 1d9fd55e195..4e90e927fb5 100644 --- a/e2e/src/step_definitions/influx/influxStepDefs.js +++ b/e2e/src/step_definitions/influx/influxStepDefs.js @@ -38,6 +38,10 @@ When(/^click nav sub menu "(.*?)"$/, async(item) => { await iSteps.clickSubMenuItem(item); }); +When(/^click user nav item "(.*)"$/, async (item) => { + await iSteps.clickUserMenuItem(item); +}); + When(/^click nav menu item "(.*?)"$/, async(item) => { await iSteps.clickMenuItem(item); }); diff --git a/e2e/src/step_definitions/settings/templatesStepDefs.js b/e2e/src/step_definitions/settings/templatesStepDefs.js index c324e45852c..c95f30dc344 100644 --- 
a/e2e/src/step_definitions/settings/templatesStepDefs.js +++ b/e2e/src/step_definitions/settings/templatesStepDefs.js @@ -64,6 +64,10 @@ When(/^upload the template file "(.*)"$/, async filePath => { await tpltSteps.uploadTemplateFile(filePath); }); +When(/^click import template popup submit button$/, async () => { + await tpltSteps.clickImportTemplateSubmitButton(); +}); + Then(/^there is a template card named "(.*)"$/, async name => { await tpltSteps.verifyTemplateCardVisibility(name); }); diff --git a/e2e/src/steps/baseSteps.js b/e2e/src/steps/baseSteps.js index 641ef3f053e..7dd8b9d7f92 100644 --- a/e2e/src/steps/baseSteps.js +++ b/e2e/src/steps/baseSteps.js @@ -403,11 +403,28 @@ class baseSteps{ async typeTextAndWait(input, text, wait = async () => { await this.driver.sleep((await this.driver.manage().getTimeouts()).implicit/20); }) { //wait 1/10th implicit timeout) + await input.sendKeys(text).then(async() => { await wait(); }); } + //Sometimes - rare times - sendKeys in typeTextAndWait() above seems to drop a char + //Use this as a work around + async typeTextParanoAndWait(input, text, + wait = async () => { await this.driver.sleep((await this.driver.manage().getTimeouts()).implicit/20); }){ + + let chars = text.split(''); + + for(let c of chars){ + await input.sendKeys(c).then(async () => { + this.driver.sleep(167) + }) + } + await wait(); + } + + async verifyElementText(element, text){ await element.getText().then(async elText => { await expect(elText).to.equal(text); @@ -605,6 +622,9 @@ class baseSteps{ } async setFileUpload(filePath){ + await fs.readdir('etc/test-data', function(err, items){ + console.log("DEBUG etc/test-data: \n" + items); + }); await this.basePage.getPopupFileUpload().then(async elem => { await elem.sendKeys(process.cwd() + '/' + filePath).then(async () => { await this.delay(200); //debug wait - todo better wait @@ -649,8 +669,8 @@ class baseSteps{ await expect(await influxUtils.fileExists(filePath)).to.be.true; } - async verifyFileMatchingRegexExists(regex){ - let res = await influxUtils.verifyFileMatchingRegexFilesExist(regex); + async verifyDownloadFileMatchingRegexExists(regex){ + let res = await influxUtils.verifyDownloadFileMatchingRegexFilesExist(regex); await expect(res).to.be.true; } diff --git a/e2e/src/steps/cloudSteps.js b/e2e/src/steps/cloudSteps.js index 34fc116870d..0ec60934fa2 100644 --- a/e2e/src/steps/cloudSteps.js +++ b/e2e/src/steps/cloudSteps.js @@ -1,7 +1,11 @@ +const { until } = require('selenium-webdriver') + const baseSteps = require(__srcdir + '/steps/baseSteps.js'); //const createOrgPage = require(__srcdir + '/pages/createOrgPage.js'); const cloudLoginPage = require(__srcdir + '/pages/cloud/cloudLoginPage.js'); const influxUtils = require(__srcdir + '/utils/influxUtils.js'); +const perfUtils = require(__srcdir + '/utils/performanceUtils.js'); +const homePage = require(__srcdir + '/pages/home/homePage.js'); class cloudSteps extends baseSteps { @@ -9,6 +13,7 @@ class cloudSteps extends baseSteps { super(driver); //this.createOrgPage = new createOrgPage(driver); this.loginPage = new cloudLoginPage(driver); + this.homePage = new homePage(driver); } //for driver sync @@ -24,6 +29,20 @@ class cloudSteps extends baseSteps { //this.assertVisible(await this.createOrgPage.getbuttonCreate()); } + async openCloudPage(maxDelay){ + await perfUtils.execTimed(async () => { + await this.driver.get(__config.influx_url); + await this.loginPage.waitToLoad(10000); + }, + maxDelay, 'timely redirect failed'); + } + + async performanceBogusTest(sleep, delay){ 
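+        // Self-check for the perf harness: sleeps for `sleep` ms and expects
+        // execTimed to see that resolve within `delay` ms.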
+ await perfUtils.execTimed( async() => { + await this.driver.sleep(sleep); + }, delay, "bogus test failed", 'bogus test succeeded'); + } + async setupDefaultCloudUser(){ await influxUtils.setupCloudUser('DEFAULT'); } @@ -41,6 +60,47 @@ class cloudSteps extends baseSteps { await this.clickAndWait(await this.loginPage.getLogInButton()); } + async logInToCloudTimed(maxDelay){ + await this.typeTextAndWait(await this.loginPage.getEmailInput(), __defaultUser.username); + await this.typeTextAndWait(await this.loginPage.getPasswordInput(), __defaultUser.password); + await perfUtils.execTimed(async () => { + await this.clickAndWait(await this.loginPage.getLogInButton()); + try { + await this.driver.wait(until.elementIsVisible(await this.homePage.getNavMenu()), maxDelay * 3); + await this.driver.wait(until.elementIsVisible(await this.homePage.getPageHeader()), maxDelay * 3); + }catch(err){ + console.warn(JSON.stringify(err)); + //try again + await this.driver.wait(until.elementIsVisible(await this.homePage.getNavMenu()), maxDelay * 3); + await this.driver.wait(until.elementIsVisible(await this.homePage.getPageHeader()), maxDelay * 3); + } + },maxDelay, 'failed to timely open cloud '); + } + + //TODO - this is not checking correct page - see issue #19057 + //because currently not loading account info on logout like before + //not sure why... once logout to correct page is fixed update this method + //this is a holder so that other tests can be written + async logoutToAccountInfoTimed(maxDelay){ + await perfUtils.execTimed(async () => { + await this.clickAndWait(await this.homePage.getLogoutButton()); + await this.driver.wait(this.homePage.isLoaded(), maxDelay * 3); + }, maxDelay, 'failed to timely open info'); + } + + async logoutToLoginTimed(maxDelay){ + await perfUtils.execTimed(async () => { + await this.clickAndWait(await this.homePage.getLogoutButton()); + await this.loginPage.waitToLoad(10000); + }, maxDelay, 'login slow to reload'); + } + + async verifyCloudLoginPageLoaded(){ + await this.loginPage.isLoaded(); + } + + + } module.exports = cloudSteps; diff --git a/e2e/src/steps/dashboards/cellOverlaySteps.js b/e2e/src/steps/dashboards/cellOverlaySteps.js index 8fe6cb3549f..49d184edd91 100644 --- a/e2e/src/steps/dashboards/cellOverlaySteps.js +++ b/e2e/src/steps/dashboards/cellOverlaySteps.js @@ -3,6 +3,8 @@ const expect = require('chai').expect; const Key = require('selenium-webdriver').Key; const { By, Origin } = require('selenium-webdriver'); +const influxUtils = require(__srcdir + '/utils/influxUtils.js'); + const influxSteps = require(__srcdir + '/steps/influx/influxSteps.js'); const cellEditOverlay = require(__srcdir + '/pages/dashboards/cellEditOverlay.js'); @@ -995,6 +997,7 @@ class cellOverlaySteps extends influxSteps { async clickTMDownloadCSV(){ await this.clickAndWait(await this.cellOverlay.getTMDownloadCSV(), async () => { await this.driver.sleep(2000) }); //todo better wait - 2 sec to download + await influxUtils.dumpDownloadDir(); } async clickTMQEVariablesTab(){ diff --git a/e2e/src/steps/influx/influxSteps.js b/e2e/src/steps/influx/influxSteps.js index 3ce0750a939..84c60ad4966 100644 --- a/e2e/src/steps/influx/influxSteps.js +++ b/e2e/src/steps/influx/influxSteps.js @@ -122,6 +122,10 @@ class influxSteps extends baseSteps { await this.clickAndWait(await this.getNavMenuElem(item), wait); } + async clickUserMenuItem(item){ + await this.clickAndWait(await this.getUserMenuElem(item.toLowerCase())); + } + async clickSubMenuItem(item, wait = async () => { await 
this.driver.sleep(1000); }){ if(item.toLowerCase() === 'dashboards'){//troubleshoot issue in circleci diff --git a/e2e/src/steps/loadData/telegrafsSteps.js b/e2e/src/steps/loadData/telegrafsSteps.js index f3fc36199b2..0edbab19578 100644 --- a/e2e/src/steps/loadData/telegrafsSteps.js +++ b/e2e/src/steps/loadData/telegrafsSteps.js @@ -126,7 +126,7 @@ class telegrafsSteps extends loadDataSteps{ await this.assertVisible(await this.teleTab.getCopyToClipboardToken()); await this.assertVisible(await this.teleTab.getCopyToClipboardCommand()); await this.verifyElementContainsText(await this.teleTab.getCodeToken(), 'INFLUX_TOKEN'); - await this.verifyElementContainsText(await this.teleTab.getCodeCliTelegraf(), 'telegraf --config http://localhost:9999/api/v2/telegrafs/'); + await this.verifyElementContainsText(await this.teleTab.getCodeCliTelegraf(), `telegraf --config ${__config.influx_url}/api/v2/telegrafs/`); } async verifyCreateWizardStep2PluginsList(plugins){ @@ -359,7 +359,7 @@ class telegrafsSteps extends loadDataSteps{ await this.assertVisible(await this.teleTab.getCopyToClipboardToken()); await this.assertVisible(await this.teleTab.getCopyToClipboardCommand()); await this.verifyElementContainsText(await this.teleTab.getCodeToken(), 'INFLUX_TOKEN'); - await this.verifyElementContainsText(await this.teleTab.getCodeCliTelegraf(), 'telegraf --config http://localhost:9999/api/v2/telegrafs/'); + await this.verifyElementContainsText(await this.teleTab.getCodeCliTelegraf(), `telegraf --config ${__config.influx_url}/api/v2/telegrafs/`); } async verifyTelegrafConfigPopup(name){ diff --git a/e2e/src/steps/loadData/tokensSteps.js b/e2e/src/steps/loadData/tokensSteps.js index 415374426d1..3cb3670eeec 100644 --- a/e2e/src/steps/loadData/tokensSteps.js +++ b/e2e/src/steps/loadData/tokensSteps.js @@ -10,7 +10,7 @@ const adminPermissions = ['authorizations', 'sources', 'tasks', 'telegrafs', - 'users', + 'users-admin', 'variables', 'scrapers', 'secrets', diff --git a/e2e/src/steps/monitoring/checkEditSteps.js b/e2e/src/steps/monitoring/checkEditSteps.js index fdd4230da3f..2e3de17ee1f 100644 --- a/e2e/src/steps/monitoring/checkEditSteps.js +++ b/e2e/src/steps/monitoring/checkEditSteps.js @@ -99,7 +99,7 @@ class checkEditSteps extends influxSteps { async setCheckIntervalInput(duration){ await this.clearInputText(await this.ckEdPage.getConfChkIntervalInput()); - await this.typeTextAndWait(await this.ckEdPage.getConfChkIntervalInput(), duration); + await this.typeTextParanoAndWait(await this.ckEdPage.getConfChkIntervalInput(), duration); } async verifyCkEdIntervalInput(duration){ @@ -118,12 +118,12 @@ class checkEditSteps extends influxSteps { async setCheckOffsetInput(val){ await this.clearInputText(await this.ckEdPage.getConfChkOffset()); - await this.typeTextAndWait(await this.ckEdPage.getConfChkOffset(), val); + await this.typeTextParanoAndWait(await this.ckEdPage.getConfChkOffset(), val); } async enterIntoIntervalOffset(offset){ await this.clearInputText(await this.ckEdPage.getConfChkOffset()); - await this.typeTextAndWait(await this.ckEdPage.getConfChkOffset(), offset); + await this.typeTextParanoAndWait(await this.ckEdPage.getConfChkOffset(), offset); } async verifyCkEdHintDropdownNotVisible(){ @@ -177,7 +177,7 @@ class checkEditSteps extends influxSteps { async setUnaryThresholdBoundaryValue(threshold, val1){ await this.clearInputText(await this.ckEdPage.getConfNthThresholdDefInput(threshold)); - await this.typeTextAndWait(await this.ckEdPage.getConfNthThresholdDefInput(threshold), val1); + await 
this.typeTextParanoAndWait(await this.ckEdPage.getConfNthThresholdDefInput(threshold), val1); } async verifyBinaryThresholdBoundaryValues(threshold, lower, upper){ @@ -273,7 +273,7 @@ class checkEditSteps extends influxSteps { async setValueDefinitionStopInput(val){ await this.clearInputText(await this.ckEdPage.getConfDeadmanStopInput()); - await this.typeTextAndWait(await this.ckEdPage.getConfDeadmanStopInput(), val); + await this.typeTextParanoAndWait(await this.ckEdPage.getConfDeadmanStopInput(), val); }; async verifyCellEditPreviewThresholdMarkers(markers){ diff --git a/e2e/src/steps/settings/templatesSteps.js b/e2e/src/steps/settings/templatesSteps.js index 558194f2712..081fbbc73f6 100644 --- a/e2e/src/steps/settings/templatesSteps.js +++ b/e2e/src/steps/settings/templatesSteps.js @@ -26,7 +26,7 @@ class templatesSteps extends baseSteps{ async verifyImportTemplatePopupLoaded(){ await this.assertVisible(await this.tmTab.getImportTemplateUploadButton()); await this.assertVisible(await this.tmTab.getImportTemplatePasteButton()); - await this.assertVisible(await this.tmTab.getPopupSubmit()); + await this.assertVisible(await this.tmTab.getImportTemplateSubmitButton()); await this.assertVisible(await this.tmTab.getPopupDismiss()); await this.assertVisible(await this.tmTab.getPopupFileUploadHeader()); await this.verifyElementContainsText(await this.tmTab.getPopupTitle(), 'Import Template'); @@ -58,9 +58,9 @@ class templatesSteps extends baseSteps{ async verifyImportTemplatePopupSubmitEnabled(enabled){ if(enabled){ - await this.verifyElementEnabled(await this.tmTab.getPopupSubmit()); + await this.verifyElementEnabled(await this.tmTab.getImportTemplateSubmitButton()); }else{ - await this.verifyElementDisabled(await this.tmTab.getPopupSubmit()); + await this.verifyElementDisabled(await this.tmTab.getImportTemplateSubmitButton()); } } @@ -92,6 +92,10 @@ class templatesSteps extends baseSteps{ }); } + async clickImportTemplateSubmitButton(){ + await this.clickAndWait(await this.tmTab.getImportTemplateSubmitButton()); + } + async verifyTemplateCardVisibility(name){ await this.assertVisible(await this.tmTab.getTemplateCardByName(name)); } diff --git a/e2e/src/utils/influxUtils.js b/e2e/src/utils/influxUtils.js index d3f3f6947bc..76a22050432 100644 --- a/e2e/src/utils/influxUtils.js +++ b/e2e/src/utils/influxUtils.js @@ -35,20 +35,22 @@ process.argv.slice(2).forEach((val) => { let pair = val.split('='); switch(pair[0]){ - case 'headless': //overrides value in config file - //config.headless = (pair[1] === 'true'); - setHeadless = true; - newHeadless = (pair[1] === 'true'); - break; - case 'sel_docker': - case 'selDocker': - //config.sel_docker = (pair[1] === 'true'); - selDocker = (pair[1] === 'true'); - break; + case 'headless': //overrides value in config file + setHeadless = true; + newHeadless = (pair[1] === 'true'); + // Need to pop our args out, otherwise they will interfere with cucumber ArgvParser + process.argv.splice(process.argv.indexOf(val), 1); + break; + case 'sel_docker': + case 'selDocker': + selDocker = (pair[1] === 'true'); + process.argv.splice(process.argv.indexOf(val), 1); + break; case 'activeConf': case 'active_conf': - //config = require(__basedir + '/e2e.conf.json')[pair[1]]; - active_config = pair[1]; + active_config = pair[1]; + process.argv.splice(process.argv.indexOf(val), 1); + break; } }); @@ -63,14 +65,67 @@ global.__users = { 'init': undefined }; global.__killLiveDataGen = false; global.__liveDataGenRunning = false; global.__reportedResetOnce = false; 
+global.__dockerRun = false;
+global.__currentFeature = 'jar';
+global.__currentScenario = 'pickle';
+
+if(typeof __config.download_dir === 'undefined'){
+    __config.download_dir = __basedir;
+}
 
 console.log(config.headless ? 'running headless' : 'running headed');
 console.log(config.sel_docker ? 'running for selenium in docker' : 'running for selenium standard');
 console.log(`active configuration ${JSON.stringify(config)}`);
 
+__config.download_dir = __config.download_dir + '/etc/';
+
+//redefine any config fields based on env properties of the form E2E_<ACTIVE_CONF>_<FIELD>
+const resetConfigFieldsToEnvar = (base, o) => {
+    Object.keys(o).forEach( k => {
+        let envar = `${base}_${k.toUpperCase()}`;
+        switch(typeof o[k]){
+        case 'string':
+            if(typeof process.env[envar] !== 'undefined'){
+                console.log(`--- resetting config val ${k} to ${envar} ---`)
+                o[k] = process.env[envar];
+            }
+            break;
+        case 'object':
+            resetConfigFieldsToEnvar(envar, o[k]);
+            break;
+        default: //i.e. undefined
+            //do nothing
+            break;
+        }
+    })
+};
+
+resetConfigFieldsToEnvar(`E2E_${active_config.toUpperCase()}`, __config);
+
 //Need to keep axios for calls to /debug/flush
 axios.defaults.baseURL = `${config.influx_url}`;
+
 /* Uncomment to debug axios
 axios.interceptors.request.use(request => {
     console.log('Starting Request', request)
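
[Editor's note] The override walker above maps every string field of the active config, nested fields included, onto an underscore-joined environment variable. A minimal sketch of the resulting contract, assuming an active configuration named `development` and a config shape with a nested `default_user` object (both assumed here for illustration, not taken from the patch):

// Illustrative only: string fields of __config can be overridden per env var.
process.env.E2E_DEVELOPMENT_INFLUX_URL = 'http://influxdb:8086';
process.env.E2E_DEVELOPMENT_DEFAULT_USER_PASSWORD = 's3cret';

resetConfigFieldsToEnvar('E2E_DEVELOPMENT', __config);

// __config.influx_url            -> 'http://influxdb:8086'
// __config.default_user.password -> 's3cret'
// non-string fields (booleans, numbers) fall through the default case untouched
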
@@ -98,7 +153,7 @@ const removeConfInDocker = async () => {
 
 //for compatibility with old test style - until all are updated
 const setupUser = async(newUser) => {
-    console.warn("WARNING: call to user old style start " + JSON.stringify(newUser));
+    console.warn('WARNING: old-style user setup called for ' + newUser.username);
 
     if(newUser.username === 'ENV'){
         await resolveUsernameFromEnv(newUser);
@@ -176,7 +231,7 @@ const setupNewUser = async(newUser) => {
 
 const resolvePasswordFromEnv = async(user) => {
     if(user.password.toUpperCase() === 'ENV'){
-        let envar = `${__config.config_id.toUpperCase()}_DEFAULT_USER_PASSWORD`;
+        let envar = `E2E_${__config.config_id.toUpperCase()}_DEFAULT_USER_PASSWORD`;
         user.password = process.env[envar]
     }
 
@@ -185,7 +240,7 @@ const resolvePasswordFromEnv = async(user) => {
 
 const resolveUsernameFromEnv = async(user) => {
     if(user.username.toUpperCase() === 'ENV'){
-        let envar = `${__config.config_id.toUpperCase()}_DEFAULT_USERNAME`;
+        let envar = `E2E_${__config.config_id.toUpperCase()}_DEFAULT_USER_USERNAME`;
         user.username = process.env[envar]
     }
 
@@ -198,7 +253,7 @@ const resolveUsernameFromEnv = async(user) => {
     }
 
     if(user.token.toUpperCase() === 'ENV'){
-        let envar = `${__config.config_id.toUpperCase()}_DEFAULT_USER_TOKEN`;
+        let envar = `E2E_${__config.config_id.toUpperCase()}_DEFAULT_USER_TOKEN`;
         user.token = process.env[envar]
     }
 };
@@ -219,12 +274,12 @@ const setupUserRest = async(user) => {
         await setupAPI.postSetup({
             body: body,
         });
-        console.log(`--- Setup user ${JSON.stringify(user)} at ${__config.influx_url} success ---`)
+        console.log(`--- Setup user ${user.username} at ${__config.influx_url} success ---`)
     }else{
-        console.error(`--- Failed to setup user ${JSON.stringify(user)} at ${__config.influx_url} ---`);
+        console.error(`--- Failed to setup user ${user.username} at ${__config.influx_url} ---`);
     }
 }).catch(async error => {
-    console.error(`\n--- Setup user ${JSON.stringify(user)} ended in ERROR ---`);
+    console.error(`\n--- Setup user ${user.username} ended in ERROR ---`);
     console.error(error)
 });
 };
@@ -757,25 +812,42 @@ const removeFileIfExists = async function(filepath){
     }
 };
 
-const removeFilesByRegex = async function(regex){
+const removeDownloadFilesByRegex = async function(regex){
     let re = new RegExp(regex)
-    await fs.readdir('.', (err, files) => {
+    await fs.readdir(__config.download_dir, (err, files) => {
         for(var i = 0; i < files.length; i++){
             var match = files[i].match(re);
             if(match !== null){
-                fs.unlinkSync(match[0]);
+                fs.unlinkSync(__config.download_dir + '/' + match[0]);
             }
         }
     });
 };
 
 const fileExists = async function(filePath){
-    return fs.existsSync(filePath);
+    return fs.existsSync(__config.download_dir + '/' + filePath);
 };
 
-const verifyFileMatchingRegexFilesExist = async function(regex, callback){
+const dumpDownloadDir = async function(){
+    console.log('DEBUG __config.download_dir: ' + __config.download_dir);
+    let files = fs.readdirSync(__config.download_dir);
+    for(let file of files){
+        console.log('    ' + file);
+    }
+    console.log('DEBUG __config.download_dir/..: ' + __config.download_dir);
+    files = fs.readdirSync(__config.download_dir + '/..');
+    for(let file of files){
+        console.log('    ' + file);
+    }
+};
+
+const verifyDownloadFileMatchingRegexFilesExist = async function(regex, callback){
     let re = new RegExp(regex);
-    let files = fs.readdirSync('.');
+    let files = fs.readdirSync(__config.download_dir);
 
     for(var i = 0; i < files.length; i++){
         var match = files[i].match(re);
@@ -791,14 +863,15 @@ const waitForFileToExist = async function(filePath, timeout = 60000){
     let sleepTime = 3000;
     let totalSleep = 0;
     while (totalSleep < timeout){
-        if(fs.existsSync(filePath)){
+        if(fs.existsSync(__config.download_dir + '/' + filePath)){
             return true;
         }
         await __wdriver.sleep(sleepTime);
         totalSleep += sleepTime;
     }
 
-    throw `Timed out ${timeout}ms waiting for file ${filePath}`;
+    throw `Timed out ${timeout}ms waiting for file ${__config.download_dir}/${filePath}`;
 };
 
@@ -878,6 +951,7 @@ module.exports = { flush,
     writeData,
     createDashboard,
     createVariable,
+    dumpDownloadDir,
     getDashboards,
     query,
     createBucket,
@@ -896,12 +970,12 @@ module.exports = { flush,
     getAuthorizations,
     removeConfInDocker,
     removeFileIfExists,
-    removeFilesByRegex,
+    removeDownloadFilesByRegex,
     setupNewUser,
     startLiveDataGen,
     stopLiveDataGen,
     fileExists,
-    verifyFileMatchingRegexFilesExist,
+    verifyDownloadFileMatchingRegexFilesExist,
     waitForFileToExist
 };
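
[Editor's note] With the file helpers above rooted at `__config.download_dir`, a download check in a step definition no longer depends on the process working directory. A rough sketch of the intended usage; the page object, its getter, and the file name are assumed for illustration, only the influxUtils calls come from this patch:

const influxUtils = require(__srcdir + '/utils/influxUtils.js');

// Hypothetical step: trigger a CSV export and verify it landed in download_dir.
async function downloadAndVerifyCSV(){
    // clear artifacts from earlier runs so a stale file cannot pass the check
    await influxUtils.removeDownloadFilesByRegex('.*\\.csv');
    await this.clickAndWait(await this.cellOverlay.getTMDownloadCSV());
    // polls __config.download_dir every 3s up to the timeout
    await influxUtils.waitForFileToExist('export.csv', 30000);
}
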
diff --git a/e2e/src/utils/performanceUtils.js b/e2e/src/utils/performanceUtils.js
new file mode 100644
index 00000000000..bd6a5dec19b
--- /dev/null
+++ b/e2e/src/utils/performanceUtils.js
@@ -0,0 +1,149 @@
+const assert = require('chai').assert;
+const fs = require('fs');
+
+let performanceLog = [];
+let performanceRecFile = './report/performance.csv';
+
+//performance record shape:
+//{ name: string, state: 'pass'|'fail', maxTime: number, duration: number,
+//  delta: number, start: string (ISO date), message: string }
+
+const execTimed = async (func, maxDelay, failMsg, successMsg) => {
+
+    if(typeof maxDelay !== 'number'){
+        throw `maxDelay must be of type number. Got ${typeof maxDelay}`;
+    }
+
+    let start = new Date();
+    let startl = start.getTime();
+    await func();
+    let finish = new Date();
+    let finishl = finish.getTime();
+    try{
+        assert.isBelow(finishl - startl, maxDelay, failMsg);
+        performanceLog.push({name: `${__currentFeature}: ${__currentScenario}`,
+            state: 'pass',
+            maxTime: maxDelay,
+            duration: finishl - startl,
+            delta: maxDelay - (finishl - startl),
+            start: start.toISOString(),
+            message: (typeof successMsg === 'undefined') ? 'success' : successMsg,
+        });
+    }catch(err){
+        performanceLog.push({name: `${__currentFeature}: ${__currentScenario}`,
+            state: 'fail',
+            maxTime: maxDelay,
+            duration: finishl - startl,
+            delta: maxDelay - (finishl - startl),
+            start: start.toISOString(),
+            message: (typeof failMsg === 'undefined') ? 'failure' : failMsg,
+        });
+        throw err;
+    }
+};
+
+const writePerformanceLog = async () => {
+    if(performanceLog.length < 1){
+        return;
+    }
+    console.log('\nPerformance');
+    let header = [];
+    for(let prop in performanceLog[0]){
+        if(performanceLog[0].hasOwnProperty(prop)){
+            header.push(`${prop}`);
+        }
+    }
+
+    let fieldSizes = {};
+    for(let field of header){
+        fieldSizes[field] = field.length;
+    }
+
+    for(let log of performanceLog){
+        for(let field of header){
+            if(log[field].toString().length > fieldSizes[field]){
+                fieldSizes[field] = log[field].toString().length;
+            }
+        }
+    }
+
+    for(let field of header){
+        fieldSizes[field] += 2;
+    }
+
+    let headerString = '';
+
+    for(let f of header){
+        headerString += f.padEnd(fieldSizes[f]) + '| ';
+    }
+
+    console.log(headerString);
+
+    //add divider
+    console.log('-'.padEnd(Object.values(fieldSizes).reduce((a,b) => a+b+2), '-'));
+    let successCt = 0;
+    let failCt = 0;
+    for(let log of performanceLog){
+        let row = '';
+        for(let f of header){
+            row += log[f].toString().padEnd(fieldSizes[f]) + '| ';
+        }
+
+        if(log.state === 'fail'){
+            console.log('\x1b[31m%s\x1b[0m', row);
+            failCt++;
+        }else{
+            console.log('\x1b[32m%s\x1b[0m', row);
+            successCt++;
+        }
+    }
+    //add divider
+    console.log('-'.padEnd(Object.values(fieldSizes).reduce((a,b) => a+b+2), '-'));
+    console.log('\x1b[96m%s\x1b[0m', 'Performance scenarios');
+    console.log(`total: ${performanceLog.length}`);
+    if(successCt > 0){
+        console.log('\x1b[32m%s\x1b[0m', `success: ${successCt}`);
+    }
+    if(failCt > 0){
+        console.log('\x1b[31m%s\x1b[0m', `fail: ${failCt}`);
+    }
+    console.log()
+};
+
+const writePerformanceReport = async (filename = performanceRecFile) => {
+    let header = [];
+
+    for(let prop in performanceLog[0]){
+        if(performanceLog[0].hasOwnProperty(prop)){
+            header.push(`${prop}`);
+        }
+    }
+
+    //write header if report does not yet exist
+    if(!fs.existsSync(filename)){
+        for(let f of header){
+            await fs.appendFileSync(filename, `${f},`)
+        }
+        await fs.appendFileSync(filename, '\n');
+    }
+
+    for(let log of performanceLog) {
+        for(let f of header){
+            await fs.appendFileSync(filename, `${log[f].toString()},`);
+        }
+        await fs.appendFileSync(filename, '\n');
+    }
+};
+
+module.exports = {
+    execTimed,
+    performanceLog,
+    writePerformanceLog,
+    writePerformanceReport
+};
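
[Editor's note] With the `__currentFeature`/`__currentScenario` globals feeding the log records, a timed step only needs to wrap its waits in `execTimed`. A minimal sketch of a new timed step in the style of the cloudSteps methods above; the page object, its getters, and the baseSteps path are assumed for illustration:

const perfUtils = require(__srcdir + '/utils/performanceUtils.js');
const baseSteps = require(__srcdir + '/steps/baseSteps.js');

class perfDemoSteps extends baseSteps {
    // fail the scenario, and record a 'fail' row in performanceLog,
    // if the tasks view takes longer than maxDelay ms to load
    async openTasksViewTimed(maxDelay){
        await perfUtils.execTimed(async () => {
            await this.clickAndWait(await this.homePage.getNavMenuTasks());
            await this.tasksPage.waitToLoad(maxDelay * 3);
        }, maxDelay, 'tasks view slow to load', 'tasks view loaded in time');
    }
}

module.exports = perfDemoSteps;
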
From e82f2d9e0c898de0865f538e92789741695ebdc6 Mon Sep 17 00:00:00 2001
From: Alirie Gray
Date: Wed, 5 Aug 2020 08:46:26 -0700
Subject: [PATCH 11/13] feat: add urm client to tenant package (#19198)

---
 tenant/http_client_urm.go        | 127 +++++++++++++++++++++++++++++
 tenant/http_client_user.go       |   6 +-
 tenant/http_handler_urm.go       |   4 +-
 tenant/http_handler_urm_test.go  | 122 +++++++++++++++++++++++++++++
 tenant/http_server_onboarding.go |   2 +-
 tenant/http_server_user.go       |  12 +--
 tenant/service.go                |   2 +-
 7 files changed, 262 insertions(+), 13 deletions(-)
 create mode 100644 tenant/http_client_urm.go

diff --git a/tenant/http_client_urm.go b/tenant/http_client_urm.go
new file mode 100644
index 00000000000..25c4d2d1203
--- /dev/null
+++ b/tenant/http_client_urm.go
@@ -0,0 +1,127 @@
+package tenant
+
+import (
+	"context"
+	"path"
+
+	"github.com/influxdata/influxdb/v2"
+	"github.com/influxdata/influxdb/v2/pkg/httpc"
+)
+
+type UserResourceMappingClient struct {
+	Client *httpc.Client
+}
+
+// CreateUserResourceMapping will create a user resource mapping
+func (s *UserResourceMappingClient) CreateUserResourceMapping(ctx context.Context, m *influxdb.UserResourceMapping) error {
+	if err := m.Validate(); err != nil {
+		return err
+	}
+
+	urlPath := resourceIDPath(m.ResourceType, m.ResourceID, string(m.UserType)+"s")
+	return s.Client.
+		PostJSON(influxdb.User{ID: m.UserID}, urlPath).
+		DecodeJSON(m).
+		Do(ctx)
+}
+
+// FindUserResourceMappings returns the user resource mappings
+func (s *UserResourceMappingClient) FindUserResourceMappings(ctx context.Context, f influxdb.UserResourceMappingFilter, opt ...influxdb.FindOptions) ([]*influxdb.UserResourceMapping, int, error) {
+	var results resourceUsersResponse
+	err := s.Client.
+		Get(resourceIDPath(f.ResourceType, f.ResourceID, string(f.UserType)+"s")).
+		DecodeJSON(&results).
+		Do(ctx)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	urs := make([]*influxdb.UserResourceMapping, len(results.Users))
+	for k, item := range results.Users {
+		urs[k] = &influxdb.UserResourceMapping{
+			ResourceID:   f.ResourceID,
+			ResourceType: f.ResourceType,
+			UserID:       item.User.ID,
+			UserType:     item.Role,
+		}
+	}
+	return urs, len(urs), nil
+}
+
+// DeleteUserResourceMapping will delete a user resource mapping based on the given criteria.
+// Note: this generic client issues the delete against the org members endpoint;
+// use SpecificURMSvc for other resource and user types.
+func (s *UserResourceMappingClient) DeleteUserResourceMapping(ctx context.Context, resourceID influxdb.ID, userID influxdb.ID) error {
+	urlPath := resourceIDUserPath(influxdb.OrgsResourceType, resourceID, influxdb.Member, userID)
+	return s.Client.
+		Delete(urlPath).
+		Do(ctx)
+}
+
+// SpecificURMSvc returns a URM service bound to a specific resource and user type.
+// This keeps the existing service contract intact while letting URM deletes go
+// through the correct API.
+func (s *UserResourceMappingClient) SpecificURMSvc(rt influxdb.ResourceType, ut influxdb.UserType) *SpecificURMSvc {
+	return &SpecificURMSvc{
+		Client: s.Client,
+		rt:     rt,
+		ut:     ut,
+	}
+}
+
+// SpecificURMSvc is a URM client that speaks to a specific resource with a specified user type
+type SpecificURMSvc struct {
+	Client *httpc.Client
+	rt     influxdb.ResourceType
+	ut     influxdb.UserType
+}
+
+// FindUserResourceMappings returns the user resource mappings
+func (s *SpecificURMSvc) FindUserResourceMappings(ctx context.Context, f influxdb.UserResourceMappingFilter, opt ...influxdb.FindOptions) ([]*influxdb.UserResourceMapping, int, error) {
+	var results resourceUsersResponse
+	err := s.Client.
+		Get(resourceIDPath(s.rt, f.ResourceID, string(s.ut)+"s")).
+		DecodeJSON(&results).
+		Do(ctx)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	urs := make([]*influxdb.UserResourceMapping, len(results.Users))
+	for k, item := range results.Users {
+		urs[k] = &influxdb.UserResourceMapping{
+			ResourceID:   f.ResourceID,
+			ResourceType: f.ResourceType,
+			UserID:       item.User.ID,
+			UserType:     item.Role,
+		}
+	}
+	return urs, len(urs), nil
+}
+
+// CreateUserResourceMapping will create a user resource mapping
+func (s *SpecificURMSvc) CreateUserResourceMapping(ctx context.Context, m *influxdb.UserResourceMapping) error {
+	if err := m.Validate(); err != nil {
+		return err
+	}
+
+	urlPath := resourceIDPath(s.rt, m.ResourceID, string(s.ut)+"s")
+	return s.Client.
+		PostJSON(influxdb.User{ID: m.UserID}, urlPath).
+		DecodeJSON(m).
+		Do(ctx)
+}
+
+// DeleteUserResourceMapping will delete a user resource mapping based on the given criteria,
+// using the resource and user types the service was constructed with.
+func (s *SpecificURMSvc) DeleteUserResourceMapping(ctx context.Context, resourceID influxdb.ID, userID influxdb.ID) error {
+	urlPath := resourceIDUserPath(s.rt, resourceID, s.ut, userID)
+	return s.Client.
+		Delete(urlPath).
+		Do(ctx)
+}
+
+func resourceIDPath(resourceType influxdb.ResourceType, resourceID influxdb.ID, p string) string {
+	return path.Join("/api/v2/", string(resourceType), resourceID.String(), p)
+}
+
+func resourceIDUserPath(resourceType influxdb.ResourceType, resourceID influxdb.ID, userType influxdb.UserType, userID influxdb.ID) string {
+	return path.Join("/api/v2/", string(resourceType), resourceID.String(), string(userType)+"s", userID.String())
+}
diff --git a/tenant/http_client_user.go b/tenant/http_client_user.go
index a281dd1f974..e8571fb9d45 100644
--- a/tenant/http_client_user.go
+++ b/tenant/http_client_user.go
@@ -18,7 +18,7 @@ type UserClientService struct {
 
 // FindMe returns user information about the owner of the token
 func (s *UserClientService) FindMe(ctx context.Context, id influxdb.ID) (*influxdb.User, error) {
-	var res userResponse
+	var res UserResponse
 	err := s.Client.
 		Get(prefixMe).
 		DecodeJSON(&res).
@@ -31,7 +31,7 @@ func (s *UserClientService) FindMe(ctx context.Context, id influxdb.ID) (*influx
 
 // FindUserByID returns a single user by ID.
 func (s *UserClientService) FindUserByID(ctx context.Context, id influxdb.ID) (*influxdb.User, error) {
-	var res userResponse
+	var res UserResponse
 	err := s.Client.
 		Get(prefixUsers, id.String()).
 		DecodeJSON(&res).
@@ -105,7 +105,7 @@ func (s *UserClientService) CreateUser(ctx context.Context, u *influxdb.User) er
 
 // UpdateUser updates a single user with changeset.
 // Returns the new user state after update.
 func (s *UserClientService) UpdateUser(ctx context.Context, id influxdb.ID, upd influxdb.UserUpdate) (*influxdb.User, error) {
-	var res userResponse
+	var res UserResponse
 	err := s.Client.
 		PatchJSON(upd, prefixUsers, id.String()).
 		DecodeJSON(&res).
diff --git a/tenant/http_handler_urm.go b/tenant/http_handler_urm.go index 15c0cb8b3f7..9e883e7a901 100644 --- a/tenant/http_handler_urm.go +++ b/tenant/http_handler_urm.go @@ -231,13 +231,13 @@ func (h *urmHandler) decodeDeleteRequest(ctx context.Context, r *http.Request) ( type resourceUserResponse struct { Role influxdb.UserType `json:"role"` - *userResponse + *UserResponse } func newResourceUserResponse(u *influxdb.User, userType influxdb.UserType) *resourceUserResponse { return &resourceUserResponse{ Role: userType, - userResponse: newUserResponse(u), + UserResponse: newUserResponse(u), } } diff --git a/tenant/http_handler_urm_test.go b/tenant/http_handler_urm_test.go index 59341479b7f..0834235a5a3 100644 --- a/tenant/http_handler_urm_test.go +++ b/tenant/http_handler_urm_test.go @@ -13,8 +13,10 @@ import ( "github.com/go-chi/chi" "github.com/google/go-cmp/cmp" "github.com/influxdata/influxdb/v2" + ihttp "github.com/influxdata/influxdb/v2/http" "github.com/influxdata/influxdb/v2/mock" "github.com/influxdata/influxdb/v2/tenant" + itesting "github.com/influxdata/influxdb/v2/testing" "go.uber.org/zap/zaptest" ) @@ -369,3 +371,123 @@ func TestUserResourceMappingService_PostMembersHandler(t *testing.T) { } } } + +func TestUserResourceMappingService_Client(t *testing.T) { + type fields struct { + userService influxdb.UserService + userResourceMappingService influxdb.UserResourceMappingService + } + type args struct { + resourceID string + userType influxdb.UserType + user influxdb.User + } + tests := []struct { + name string + fields fields + args args + }{ + { + name: "post members", + fields: fields{ + userService: &mock.UserService{ + FindUserByIDFn: func(ctx context.Context, id influxdb.ID) (*influxdb.User, error) { + return &influxdb.User{ID: id, Name: fmt.Sprintf("user%s", id), Status: influxdb.Active}, nil + }, + }, + userResourceMappingService: &mock.UserResourceMappingService{ + CreateMappingFn: func(ctx context.Context, m *influxdb.UserResourceMapping) error { + return nil + }, + FindMappingsFn: func(ctx context.Context, f influxdb.UserResourceMappingFilter) ([]*influxdb.UserResourceMapping, int, error) { + return []*influxdb.UserResourceMapping{&influxdb.UserResourceMapping{}}, 1, nil + }, + }, + }, + args: args{ + resourceID: "0000000000000099", + user: influxdb.User{ + ID: 1, + Name: "user0000000000000001", + Status: influxdb.Active, + }, + userType: influxdb.Member, + }, + }, + + { + name: "post owners", + fields: fields{ + userService: &mock.UserService{ + FindUserByIDFn: func(ctx context.Context, id influxdb.ID) (*influxdb.User, error) { + return &influxdb.User{ID: id, Name: fmt.Sprintf("user%s", id), Status: influxdb.Active}, nil + }, + }, + userResourceMappingService: &mock.UserResourceMappingService{ + CreateMappingFn: func(ctx context.Context, m *influxdb.UserResourceMapping) error { + return nil + }, + FindMappingsFn: func(ctx context.Context, f influxdb.UserResourceMappingFilter) ([]*influxdb.UserResourceMapping, int, error) { + return []*influxdb.UserResourceMapping{&influxdb.UserResourceMapping{}}, 1, nil + }, + }, + }, + args: args{ + resourceID: "0000000000000099", + user: influxdb.User{ + ID: 2, + Name: "user0000000000000002", + Status: influxdb.Active, + }, + userType: influxdb.Owner, + }, + }, + } + + for _, tt := range tests { + resourceTypes := []influxdb.ResourceType{ + influxdb.BucketsResourceType, + influxdb.DashboardsResourceType, + influxdb.OrgsResourceType, + influxdb.SourcesResourceType, + influxdb.TasksResourceType, + influxdb.TelegrafsResourceType, + 
influxdb.UsersResourceType, + } + + for _, resourceType := range resourceTypes { + t.Run(tt.name+"_"+string(resourceType), func(t *testing.T) { + // create server + h := tenant.NewURMHandler(zaptest.NewLogger(t), resourceType, "id", tt.fields.userService, tt.fields.userResourceMappingService) + router := chi.NewRouter() + router.Mount(fmt.Sprintf("/api/v2/%s/{id}/members", resourceType), h) + router.Mount(fmt.Sprintf("/api/v2/%s/{id}/owners", resourceType), h) + s := httptest.NewServer(router) + defer s.Close() + ctx := context.Background() + + resourceID := itesting.MustIDBase16(tt.args.resourceID) + urm := &influxdb.UserResourceMapping{ResourceType: resourceType, ResourceID: resourceID, UserType: tt.args.userType, UserID: tt.args.user.ID} + + httpClient, err := ihttp.NewHTTPClient(s.URL, "", false) + if err != nil { + t.Fatal(err) + } + c := tenant.UserResourceMappingClient{Client: httpClient} + err = c.CreateUserResourceMapping(ctx, urm) + + if err != nil { + t.Fatal(err) + } + + _, n, err := c.FindUserResourceMappings(ctx, influxdb.UserResourceMappingFilter{ResourceID: resourceID, ResourceType: resourceType, UserType: tt.args.userType}) + if err != nil { + t.Fatal(err) + } + if n != 1 { + t.Fatalf("expected 1 urm to be created, got: %d", n) + } + }) + } + } +} diff --git a/tenant/http_server_onboarding.go b/tenant/http_server_onboarding.go index 029ce9432dd..022e83b5a55 100644 --- a/tenant/http_server_onboarding.go +++ b/tenant/http_server_onboarding.go @@ -110,7 +110,7 @@ func (h *OnboardHandler) handleOnboardRequest(w http.ResponseWriter, r *http.Req } type onboardingResponse struct { - User *userResponse `json:"user"` + User *UserResponse `json:"user"` Bucket *bucketResponse `json:"bucket"` Organization orgResponse `json:"org"` Auth *authResponse `json:"auth"` diff --git a/tenant/http_server_user.go b/tenant/http_server_user.go index 85b343938da..49ce656d616 100644 --- a/tenant/http_server_user.go +++ b/tenant/http_server_user.go @@ -351,7 +351,7 @@ func decodeDeleteUserRequest(ctx context.Context, r *http.Request) (*deleteUserR type usersResponse struct { Links map[string]string `json:"links"` - Users []*userResponse `json:"users"` + Users []*UserResponse `json:"users"` } func (us usersResponse) ToInfluxdb() []*influxdb.User { @@ -367,7 +367,7 @@ func newUsersResponse(users []*influxdb.User) *usersResponse { Links: map[string]string{ "self": "/api/v2/users", }, - Users: []*userResponse{}, + Users: []*UserResponse{}, } for _, user := range users { res.Users = append(res.Users, newUserResponse(user)) @@ -375,14 +375,14 @@ func newUsersResponse(users []*influxdb.User) *usersResponse { return &res } -// userResponse is the response of user -type userResponse struct { +// UserResponse is the response of user +type UserResponse struct { Links map[string]string `json:"links"` influxdb.User } -func newUserResponse(u *influxdb.User) *userResponse { - return &userResponse{ +func newUserResponse(u *influxdb.User) *UserResponse { + return &UserResponse{ Links: map[string]string{ "self": fmt.Sprintf("/api/v2/users/%s", u.ID), }, diff --git a/tenant/service.go b/tenant/service.go index 0000acf6d4a..a15b38e1ea3 100644 --- a/tenant/service.go +++ b/tenant/service.go @@ -67,7 +67,7 @@ func (ts *Service) NewOrgHTTPHandler(log *zap.Logger, secretSvc influxdb.SecretS } func (ts *Service) NewBucketHTTPHandler(log *zap.Logger, labelSvc influxdb.LabelService) *BucketHandler { - urmHandler := NewURMHandler(log.With(zap.String("handler", "urm")), influxdb.OrgsResourceType, "id", ts.UserService, 
NewAuthedURMService(ts.OrganizationService, ts.UserResourceMappingService)) + urmHandler := NewURMHandler(log.With(zap.String("handler", "urm")), influxdb.BucketsResourceType, "id", ts.UserService, NewAuthedURMService(ts.OrganizationService, ts.UserResourceMappingService)) labelHandler := label.NewHTTPEmbeddedHandler(log.With(zap.String("handler", "label")), influxdb.BucketsResourceType, labelSvc) return NewHTTPBucketHandler(log.With(zap.String("handler", "bucket")), NewAuthedBucketService(ts.BucketService), labelSvc, urmHandler, labelHandler) } From efd82a24831aa2ff183a7ae499ba1a3790037d2c Mon Sep 17 00:00:00 2001 From: Johnny Steenbergen Date: Wed, 5 Aug 2020 09:01:58 -0700 Subject: [PATCH 12/13] feat(influx): add support for dashboards --- CHANGELOG.md | 1 + cmd/influx/dashboard.go | 154 +++++++++++++++++++++++++++++ cmd/influx/main.go | 1 + cmd/influx/telegraf.go | 2 +- cmd/influxd/launcher/pkger_test.go | 2 +- 5 files changed, 158 insertions(+), 2 deletions(-) create mode 100644 cmd/influx/dashboard.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 2289fd18331..499b7c523e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ 1. [19075](https://github.com/influxdata/influxdb/pull/19075): Add resource links to a stack's resources from public HTTP API list/read calls 1. [19103](https://github.com/influxdata/influxdb/pull/19103): Enhance resource creation experience when limits are reached +1. [19223](https://github.com/influxdata/influxdb/pull/19223): Add dashboards command to influx CLI ### Bug Fixes diff --git a/cmd/influx/dashboard.go b/cmd/influx/dashboard.go new file mode 100644 index 00000000000..e824ba2f5dd --- /dev/null +++ b/cmd/influx/dashboard.go @@ -0,0 +1,154 @@ +package main + +import ( + "context" + + "github.com/influxdata/influxdb/v2" + "github.com/influxdata/influxdb/v2/cmd/influx/internal" + "github.com/influxdata/influxdb/v2/http" + "github.com/influxdata/influxdb/v2/tenant" + "github.com/spf13/cobra" +) + +func cmdDashboard(f *globalFlags, opts genericCLIOpts) *cobra.Command { + return newCmdDashboardBuilder(newDashboardSVCs, f, opts).cmdDashboards() +} + +type dashboardSVCsFn func() (influxdb.DashboardService, influxdb.OrganizationService, error) + +type cmdDashboardBuilder struct { + genericCLIOpts + *globalFlags + + svcFn dashboardSVCsFn + + ids []string + org organization +} + +func newCmdDashboardBuilder(svcFn dashboardSVCsFn, f *globalFlags, opts genericCLIOpts) *cmdDashboardBuilder { + return &cmdDashboardBuilder{ + genericCLIOpts: opts, + globalFlags: f, + svcFn: svcFn, + } +} + +func (b *cmdDashboardBuilder) cmdDashboards() *cobra.Command { + cmd := b.newCmd("dashboards", b.listRunE) + cmd.Short = "List Dashboard(s)." + cmd.Long = ` + List Dashboard(s). 
+
+	Examples:
+		# list all known Dashboards
+		influx dashboards
+
+		# list all known Dashboards matching ids
+		influx dashboards --id $ID1 --id $ID2
+
+		# list all known Dashboards matching ids using the short flag
+		influx dashboards -i $ID1 -i $ID2
+`
+
+	b.org.register(cmd, false)
+	cmd.Flags().StringArrayVarP(&b.ids, "id", "i", nil, "Dashboard ID to retrieve.")
+
+	return cmd
+}
+
+func (b *cmdDashboardBuilder) listRunE(cmd *cobra.Command, args []string) error {
+	svc, orgSVC, err := b.svcFn()
+	if err != nil {
+		return err
+	}
+
+	orgID, _ := b.org.getID(orgSVC)
+	if orgID == 0 && len(b.ids) == 0 {
+		return &influxdb.Error{
+			Code: influxdb.EUnprocessableEntity,
+			Msg:  "at least one of org, org-id, or id must be provided",
+		}
+	}
+
+	var ids []*influxdb.ID
+	for _, rawID := range b.ids {
+		id, err := influxdb.IDFromString(rawID)
+		if err != nil {
+			return err
+		}
+		ids = append(ids, id)
+	}
+
+	var (
+		out    []*influxdb.Dashboard
+		offset int
+	)
+	const limit = 100
+	for {
+		dashboards, _, err := svc.FindDashboards(context.Background(), influxdb.DashboardFilter{
+			IDs:            ids,
+			OrganizationID: &orgID,
+		}, influxdb.FindOptions{
+			Limit:  limit,
+			Offset: offset,
+		})
+		if err != nil && influxdb.ErrorCode(err) != influxdb.ENotFound {
+			return err
+		}
+		out = append(out, dashboards...)
+		if len(dashboards) < limit {
+			break
+		}
+		offset += len(dashboards)
+	}
+
+	return b.writeDashboards(out...)
+}
+
+func (b *cmdDashboardBuilder) writeDashboards(dashboards ...*influxdb.Dashboard) error {
+	if b.json {
+		return b.writeJSON(dashboards)
+	}
+
+	tabW := b.newTabWriter()
+	defer tabW.Flush()
+
+	writeDashboardRows(tabW, dashboards...)
+	return nil
+}
+
+func writeDashboardRows(tabW *internal.TabWriter, dashboards ...*influxdb.Dashboard) {
+	tabW.WriteHeaders("ID", "OrgID", "Name", "Description", "Num Cells")
+	for _, d := range dashboards {
+		tabW.Write(map[string]interface{}{
+			"ID":          d.ID,
+			"OrgID":       d.OrganizationID.String(),
+			"Name":        d.Name,
+			"Description": d.Description,
+			"Num Cells":   len(d.Cells),
+		})
+	}
+}
+
+func (b *cmdDashboardBuilder) newCmd(use string, runE func(*cobra.Command, []string) error) *cobra.Command {
+	cmd := b.genericCLIOpts.newCmd(use, runE, true)
+	b.genericCLIOpts.registerPrintOptions(cmd)
+	b.globalFlags.registerFlags(cmd)
+	return cmd
+}
+
+func newDashboardSVCs() (influxdb.DashboardService, influxdb.OrganizationService, error) {
+	httpClient, err := newHTTPClient()
+	if err != nil {
+		return nil, nil, err
+	}
+
+	orgSVC := &tenant.OrgClientService{
+		Client: httpClient,
+	}
+	dashSVC := &http.DashboardService{
+		Client: httpClient,
+	}
+	return dashSVC, orgSVC, nil
+}
diff --git a/cmd/influx/main.go b/cmd/influx/main.go
index 21c7cd74cab..24dc1a83872 100644
--- a/cmd/influx/main.go
+++ b/cmd/influx/main.go
@@ -293,6 +293,7 @@ func influxCmd(opts ...genericCLIOptFn) *cobra.Command {
 		cmdBackup,
 		cmdBucket,
 		cmdConfig,
+		cmdDashboard,
 		cmdDelete,
 		cmdExport,
 		cmdOrganization,
diff --git a/cmd/influx/telegraf.go b/cmd/influx/telegraf.go
index c8ac6759dd4..2eb39139708 100644
--- a/cmd/influx/telegraf.go
+++ b/cmd/influx/telegraf.go
@@ -280,7 +280,7 @@ func writeTelegrafRows(tabW *internal.TabWriter, cfgs ...*influxdb.TelegrafConfi
 	for _, cfg := range cfgs {
 		tabW.Write(map[string]interface{}{
 			"ID":          cfg.ID,
-			"OrgID":       cfg.OrgID,
+			"OrgID":       cfg.OrgID.String(),
 			"Name":        cfg.Name,
 			"Description": cfg.Description,
 		})
diff --git a/cmd/influxd/launcher/pkger_test.go b/cmd/influxd/launcher/pkger_test.go
index 722ce13f74f..7cd57576a33 100644
--- a/cmd/influxd/launcher/pkger_test.go
+++ b/cmd/influxd/launcher/pkger_test.go
@@ -3432,7 +3432,7 @@ spec:
 		}, varArgs.Values)
 	})
 
-	t.Run("error incurs during template application when resources already exist rollsback to prev state", func(t *testing.T) {
+	t.Run("error incurred during template application when resources already exist rolls back to prev state", func(t *testing.T) {
 		updatePkg, err := pkger.Parse(pkger.EncodingYAML, pkger.FromString(updatePkgYMLStr))
 		require.NoError(t, err)
 
From fe5e57934ede3019936b4ec98d455f772b9ccb07 Mon Sep 17 00:00:00 2001
From: alexpaxton
Date: Wed, 5 Aug 2020 09:24:17 -0700
Subject: [PATCH 13/13] feat(flows): use global time range instead of bucket time (#19215)

* feat: replace bucket time selector with global time range

* refactor: replace custom list with List component
---
 ui/src/notebooks/components/header/Submit.tsx |  4 +-
 .../notebooks/pipes/Data/BucketSelector.tsx   | 23 +++++++----
 .../notebooks/pipes/Data/SelectorListItem.tsx | 28 -------------
 ui/src/notebooks/pipes/Data/TimeSelector.tsx  | 39 -------------------
 ui/src/notebooks/pipes/Data/index.ts          |  2 -
 ui/src/notebooks/pipes/Data/style.scss        |  4 +-
 ui/src/notebooks/pipes/Data/view.tsx          |  2 -
 7 files changed, 19 insertions(+), 83 deletions(-)
 delete mode 100644 ui/src/notebooks/pipes/Data/SelectorListItem.tsx
 delete mode 100644 ui/src/notebooks/pipes/Data/TimeSelector.tsx

diff --git a/ui/src/notebooks/components/header/Submit.tsx b/ui/src/notebooks/components/header/Submit.tsx
index 4e4a7a8de26..1d9c88624e1 100644
--- a/ui/src/notebooks/components/header/Submit.tsx
+++ b/ui/src/notebooks/components/header/Submit.tsx
@@ -79,9 +79,9 @@ export const Submit: FC = () => {
           requirements,
         })
       } else if (pipe.type === 'data') {
-        const {bucketName, timeStart, timeStop} = pipe
+        const {bucketName} = pipe
 
-        const text = `from(bucket: "${bucketName}")|>range(start: ${timeStart}, stop: ${timeStop})`
+        const text = `from(bucket: "${bucketName}")|>range(start: v.timeRangeStart, stop: v.timeRangeStop)`
 
         stages.push({
           text,
diff --git a/ui/src/notebooks/pipes/Data/BucketSelector.tsx b/ui/src/notebooks/pipes/Data/BucketSelector.tsx
index fb37e6a3ac6..e4ad5cd0096 100644
--- a/ui/src/notebooks/pipes/Data/BucketSelector.tsx
+++ b/ui/src/notebooks/pipes/Data/BucketSelector.tsx
@@ -3,12 +3,13 @@ import React, {FC, useEffect, useContext, useCallback} from 'react'
 
 // Components
 import {
-  DapperScrollbars,
   TechnoSpinner,
   ComponentSize,
   RemoteDataState,
+  InfluxColors,
+  List,
+  Gradients,
 } from '@influxdata/clockface'
-import SelectorListItem from 'src/notebooks/pipes/Data/SelectorListItem'
 import {BucketContext} from 'src/notebooks/context/buckets'
 import {PipeContext} from 'src/notebooks/context/pipe'
 
@@ -56,17 +57,25 @@ const BucketSelector: FC = () => {
 
   if (loading === RemoteDataState.Done && selectedBucketName) {
     body = (
-      <DapperScrollbars className="data-source--list">
+      <List
+        className="data-source--list"
+        backgroundColor={InfluxColors.Obsidian}
+      >
         {buckets.map(bucket => (
-          <SelectorListItem
+          <List.Item
             key={bucket.name}
             value={bucket.name}
             onClick={handleSelectBucket}
             selected={bucket.name === selectedBucketName}
-            text={bucket.name}
-          />
+            title={bucket.name}
+            gradient={Gradients.GundamPilot}
+            wrapText={true}
+          >
+            <List.Indicator type="dot" />
+            {bucket.name}
+          </List.Item>
         ))}
-      </DapperScrollbars>
+      </List>
     )
   }
 
diff --git a/ui/src/notebooks/pipes/Data/SelectorListItem.tsx b/ui/src/notebooks/pipes/Data/SelectorListItem.tsx
deleted file mode 100644
index c18351fddac..00000000000
--- a/ui/src/notebooks/pipes/Data/SelectorListItem.tsx
+++ /dev/null
@@ -1,28 +0,0 @@
-// Libraries
-import React, {FC} from 'react'
-import classnames from 'classnames'
-
-interface Props {
-  value: any
-  onClick: (value: any) => void
-  selected: boolean
-  text: string
-}
-
-const SelectorListItem: FC<Props> = ({value, onClick, selected, text}) => {
-  const className = classnames('data-source--list-item', {
-    'data-source--list-item__selected': selected,
-  })
-
-  const handleClick = (): void => {
-    onClick(value)
-  }
-
-  return (
-    <div className={className} onClick={handleClick}>
-      {text}
-    </div>
-  )
-}
-
-export default SelectorListItem
diff --git a/ui/src/notebooks/pipes/Data/TimeSelector.tsx b/ui/src/notebooks/pipes/Data/TimeSelector.tsx
deleted file mode 100644
index a847e32427d..00000000000
--- a/ui/src/notebooks/pipes/Data/TimeSelector.tsx
+++ /dev/null
@@ -1,39 +0,0 @@
-// Libraries
-import React, {FC, useContext} from 'react'
-
-// Components
-import {DapperScrollbars} from '@influxdata/clockface'
-import SelectorListItem from 'src/notebooks/pipes/Data/SelectorListItem'
-import {PipeContext} from 'src/notebooks/context/pipe'
-
-// Constants
-import {SELECTABLE_TIME_RANGES} from 'src/shared/constants/timeRanges'
-
-const TimeSelector: FC = () => {
-  const {data, update} = useContext(PipeContext)
-
-  const timeStart = data.timeStart
-
-  const updateTimeRange = (duration: string): void => {
-    update({timeStart: duration})
-  }
-
-  return (
-    <div className="data-source--block">
-      <div className="data-source--block-title">Time Range</div>
-      <DapperScrollbars className="data-source--list">
-        {SELECTABLE_TIME_RANGES.map(range => (
-          <SelectorListItem
-            key={range.label}
-            onClick={updateTimeRange}
-            value={range.lower}
-            selected={range.lower === timeStart}
-            text={range.label}
-          />
-        ))}
-      </DapperScrollbars>
-    </div>
-  )
-}
-
-export default TimeSelector
diff --git a/ui/src/notebooks/pipes/Data/index.ts b/ui/src/notebooks/pipes/Data/index.ts
index 2fc76f3dd39..acfd238c3bb 100644
--- a/ui/src/notebooks/pipes/Data/index.ts
+++ b/ui/src/notebooks/pipes/Data/index.ts
@@ -10,7 +10,5 @@ register({
   button: 'Bucket',
   initial: {
     bucketName: '',
-    timeStart: '-1h',
-    timeStop: 'now()',
   },
 })
diff --git a/ui/src/notebooks/pipes/Data/style.scss b/ui/src/notebooks/pipes/Data/style.scss
index 0b0dd1ba278..9b2627b8fb0 100644
--- a/ui/src/notebooks/pipes/Data/style.scss
+++ b/ui/src/notebooks/pipes/Data/style.scss
@@ -7,7 +7,7 @@
 }
 
 .data-source--block {
-  flex: 1 0 220px;
+  flex: 1 0 350px;
   display: flex;
   flex-direction: column;
   align-items: stretch;
@@ -27,8 +27,6 @@
 
 .data-source--list,
 .data-source--list__empty {
-  background-color: $g1-raven;
-  border-radius: $cf-radius;
   flex: 1 0 0;
 }
diff --git a/ui/src/notebooks/pipes/Data/view.tsx b/ui/src/notebooks/pipes/Data/view.tsx
index de409c7cdb0..ed5f325cae8 100644
--- a/ui/src/notebooks/pipes/Data/view.tsx
+++ b/ui/src/notebooks/pipes/Data/view.tsx
@@ -6,7 +6,6 @@ import {PipeProp} from 'src/notebooks'
 
 // Components
 import BucketSelector from 'src/notebooks/pipes/Data/BucketSelector'
-import TimeSelector from 'src/notebooks/pipes/Data/TimeSelector'
 import {FlexBox, ComponentSize} from '@influxdata/clockface'
 import BucketProvider from 'src/notebooks/context/buckets'
 
@@ -23,7 +22,6 @@ const DataSource: FC<PipeProp> = ({Context}) => {
       className="data-source"
     >
       <BucketProvider>
        <BucketSelector />
-       <TimeSelector />