From acdc2bf100348530d7f8630d78da85434c8be8d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Guimar=C3=A3es?= Date: Fri, 15 Jun 2018 10:11:32 -0300 Subject: [PATCH 001/104] Adding Cloudwatch AWS/AppSync metrics and dimensions --- pkg/tsdb/cloudwatch/metric_find_query.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index 136ee241c2e5a..12c2aba4681b4 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -86,6 +86,7 @@ func init() { "AWS/Kinesis": {"GetRecords.Bytes", "GetRecords.IteratorAge", "GetRecords.IteratorAgeMilliseconds", "GetRecords.Latency", "GetRecords.Records", "GetRecords.Success", "IncomingBytes", "IncomingRecords", "PutRecord.Bytes", "PutRecord.Latency", "PutRecord.Success", "PutRecords.Bytes", "PutRecords.Latency", "PutRecords.Records", "PutRecords.Success", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "IteratorAgeMilliseconds", "OutgoingBytes", "OutgoingRecords"}, "AWS/KinesisAnalytics": {"Bytes", "MillisBehindLatest", "Records", "Success"}, "AWS/Lambda": {"Invocations", "Errors", "Duration", "Throttles", "IteratorAge"}, + "AWS/AppSync": {"Latency", "4XXError", "5XXError"}, "AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"}, "AWS/ML": {"PredictCount", "PredictFailureCount"}, "AWS/NATGateway": {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"}, @@ -135,6 +136,7 @@ func init() { "AWS/Kinesis": {"StreamName", "ShardId"}, "AWS/KinesisAnalytics": {"Flow", "Id", "Application"}, "AWS/Lambda": {"FunctionName", "Resource", 
"Version", "Alias"}, + "AWS/AppSync": {"GraphQLAPIId"}, "AWS/Logs": {"LogGroupName", "DestinationType", "FilterName"}, "AWS/ML": {"MLModelId", "RequestMode"}, "AWS/NATGateway": {"NatGatewayId"}, From daf0c374b363d81d2ff36f44317a64279b31aa3b Mon Sep 17 00:00:00 2001 From: "Bryan T. Richardson" Date: Tue, 10 Jul 2018 10:11:39 -0600 Subject: [PATCH 002/104] Added BurstBalance metric to list of AWS RDS metrics. --- pkg/tsdb/cloudwatch/metric_find_query.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index 136ee241c2e5a..e8e2c894120bb 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -92,7 +92,7 @@ func init() { "AWS/NetworkELB": {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"}, "AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"}, "AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"}, - "AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", 
"FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"}, + "AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"}, "AWS/Route53": {"ChildHealthCheckHealthyCount", "HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"}, "AWS/S3": {"BucketSizeBytes", "NumberOfObjects", "AllRequests", "GetRequests", "PutRequests", "DeleteRequests", "HeadRequests", "PostRequests", "ListRequests", "BytesDownloaded", "BytesUploaded", "4xxErrors", "5xxErrors", "FirstByteLatency", "TotalRequestLatency"}, "AWS/SES": {"Bounce", "Complaint", "Delivery", "Reject", "Send"}, From 
0b421004ea3a41aa73fba414010ecbe5fa687f20 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Fri, 20 Jul 2018 09:59:04 +0200 Subject: [PATCH 003/104] built a component for delete button in tables, instead of using a modal to confirm it now does it in the row of the table, created a sass file for the component, the component uses css transitions for animation --- public/app/containers/Teams/TeamList.tsx | 19 +---- .../components/DeleteButton/DeleteButton.tsx | 78 +++++++++++++++++++ public/sass/_grafana.scss | 1 + public/sass/components/_delete_button.scss | 49 ++++++++++++ 4 files changed, 131 insertions(+), 16 deletions(-) create mode 100644 public/app/core/components/DeleteButton/DeleteButton.tsx create mode 100644 public/sass/components/_delete_button.scss diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx index 4429764b1cc94..475f8762c69e0 100644 --- a/public/app/containers/Teams/TeamList.tsx +++ b/public/app/containers/Teams/TeamList.tsx @@ -6,6 +6,7 @@ import { NavStore } from 'app/stores/NavStore/NavStore'; import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; import { BackendSrv } from 'app/core/services/backend_srv'; import appEvents from 'app/core/app_events'; +import DeleteButton from 'app/core/components/DeleteButton/DeleteButton'; interface Props { nav: typeof NavStore.Type; @@ -28,18 +29,6 @@ export class TeamList extends React.Component { } deleteTeam(team: ITeam) { - appEvents.emit('confirm-modal', { - title: 'Delete', - text: 'Are you sure you want to delete Team ' + team.name + '?', - yesText: 'Delete', - icon: 'fa-warning', - onConfirm: () => { - this.deleteTeamConfirmed(team); - }, - }); - } - - deleteTeamConfirmed(team) { this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this)); } @@ -67,9 +56,7 @@ export class TeamList extends React.Component { {team.memberCount} - this.deleteTeam(team)} className="btn btn-danger btn-small"> - - + 
this.deleteTeam(team)} /> ); @@ -102,7 +89,7 @@ export class TeamList extends React.Component { -
+
diff --git a/public/app/core/components/DeleteButton/DeleteButton.tsx b/public/app/core/components/DeleteButton/DeleteButton.tsx new file mode 100644 index 0000000000000..61a322b591eb4 --- /dev/null +++ b/public/app/core/components/DeleteButton/DeleteButton.tsx @@ -0,0 +1,78 @@ +import React, { Component } from 'react'; + +export default class DeleteButton extends Component { + state = { + deleteButton: 'delete-button--show', + confirmSpan: 'confirm-delete--removed', + }; + + handleDelete = event => { + if (event) { + event.preventDefault(); + } + + this.setState({ + deleteButton: 'delete-button--hide', + }); + + setTimeout(() => { + this.setState({ + deleteButton: 'delete-button--removed', + }); + }, 100); + + setTimeout(() => { + this.setState({ + confirmSpan: 'confirm-delete--hide', + }); + }, 100); + + setTimeout(() => { + this.setState({ + confirmSpan: 'confirm-delete--show', + }); + }, 150); + }; + + cancelDelete = event => { + event.preventDefault(); + + this.setState({ + confirmSpan: 'confirm-delete--hide', + }); + + setTimeout(() => { + this.setState({ + confirmSpan: 'confirm-delete--removed', + deleteButton: 'delete-button--hide', + }); + }, 140); + + setTimeout(() => { + this.setState({ + deleteButton: 'delete-button--show', + }); + }, 190); + }; + + render() { + const { confirmDelete } = this.props; + return ( + + + + + + + + Cancel + + + Confirm Delete + + + + + ); + } +} diff --git a/public/sass/_grafana.scss b/public/sass/_grafana.scss index 9e3bec267edf3..3a72bd45a1a8d 100644 --- a/public/sass/_grafana.scss +++ b/public/sass/_grafana.scss @@ -93,6 +93,7 @@ @import 'components/form_select_box'; @import 'components/user-picker'; @import 'components/description-picker'; +@import 'components/delete_button'; // PAGES @import 'pages/login'; diff --git a/public/sass/components/_delete_button.scss b/public/sass/components/_delete_button.scss new file mode 100644 index 0000000000000..19f32189d81c7 --- /dev/null +++ 
b/public/sass/components/_delete_button.scss @@ -0,0 +1,49 @@ +.delete-button-container { + max-width: 24px; + width: 24px; + direction: rtl; + max-height: 38px; + display: block; +} + +.confirm-delete-container { + overflow: hidden; + width: 145px; + display: block; +} + +.delete-button { + &--show { + display: inline-block; + opacity: 1; + transition: opacity 0.1s ease; + } + + &--hide { + display: inline-block; + opacity: 0; + transition: opacity 0.1s ease; + } + &--removed { + display: none; + } +} + +.confirm-delete { + &--show { + display: inline-block; + opacity: 1; + transition: opacity 0.08s ease-out, transform 0.1s ease-out; + transform: translateX(0); + } + + &--hide { + display: inline-block; + opacity: 0; + transition: opacity 0.12s ease-in, transform 0.14s ease-in; + transform: translateX(100px); + } + &--removed { + display: none; + } +} From b8a4b7771ae72660fd16022920265750ce42e073 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Fri, 20 Jul 2018 11:09:24 +0200 Subject: [PATCH 004/104] removed import appEvents --- public/app/containers/Teams/TeamList.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx index 475f8762c69e0..87d24f8ddd4c3 100644 --- a/public/app/containers/Teams/TeamList.tsx +++ b/public/app/containers/Teams/TeamList.tsx @@ -5,7 +5,6 @@ import PageHeader from 'app/core/components/PageHeader/PageHeader'; import { NavStore } from 'app/stores/NavStore/NavStore'; import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; import { BackendSrv } from 'app/core/services/backend_srv'; -import appEvents from 'app/core/app_events'; import DeleteButton from 'app/core/components/DeleteButton/DeleteButton'; interface Props { From b6909eb3b00b10bfbb72eb4495cb89e0e7a625ca Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Fri, 20 Jul 2018 16:02:41 +0200 Subject: [PATCH 005/104] removed blue-dark variable with blue-light in light-theme, blue 
variable now has same value as blue-dark had before, should fix issue with any low contrast issues with blue in light-theme, this made query-blue variable unnecessery removed it, added variable for variable dropdown highlight background --- public/sass/_variables.dark.scss | 4 +++- public/sass/_variables.light.scss | 24 ++++++++++++----------- public/sass/components/_query_editor.scss | 6 +++--- public/sass/components/_slate_editor.scss | 2 +- public/sass/components/_submenu.scss | 2 +- public/sass/components/_timepicker.scss | 2 +- 6 files changed, 22 insertions(+), 18 deletions(-) diff --git a/public/sass/_variables.dark.scss b/public/sass/_variables.dark.scss index eb73b014a9376..01590ace5859d 100644 --- a/public/sass/_variables.dark.scss +++ b/public/sass/_variables.dark.scss @@ -44,7 +44,6 @@ $brand-success: $green; $brand-warning: $brand-primary; $brand-danger: $red; -$query-blue: $blue; $query-red: $red; $query-green: $green; $query-purple: $purple; @@ -347,3 +346,6 @@ $diff-json-changed-fg: $gray-5; $diff-json-changed-num: $text-color; $diff-json-icon: $gray-7; + +//Submenu +$variable-option-bg: $blue-dark; diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index 7e5e1b6a7f8b4..b6e9e7db979bd 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -30,8 +30,8 @@ $white: #fff; // Accent colors // ------------------------- -$blue: #61c2f2; -$blue-dark: #0083b3; +$blue: #0083b3; +$blue-light: #00a8e6; $green: #3aa655; $red: #d44939; $yellow: #ff851b; @@ -45,7 +45,6 @@ $brand-success: $green; $brand-warning: $orange; $brand-danger: $red; -$query-blue: $blue-dark; $query-red: $red; $query-green: $green; $query-purple: $purple; @@ -82,7 +81,7 @@ $page-gradient: linear-gradient(-60deg, $gray-7, #f5f6f9 70%, $gray-7 98%); $link-color: $gray-1; $link-color-disabled: lighten($link-color, 30%); $link-hover-color: darken($link-color, 20%); -$external-link-color: $blue; +$external-link-color: $blue-light; 
// Typography // ------------------------- @@ -150,8 +149,8 @@ $scrollbarBorder: $gray-4; $btn-primary-bg: $brand-primary; $btn-primary-bg-hl: lighten($brand-primary, 8%); -$btn-secondary-bg: $blue-dark; -$btn-secondary-bg-hl: lighten($blue-dark, 4%); +$btn-secondary-bg: $blue; +$btn-secondary-bg-hl: lighten($blue, 4%); $btn-success-bg: lighten($green, 3%); $btn-success-bg-hl: darken($green, 3%); @@ -168,7 +167,7 @@ $btn-inverse-text-color: $gray-1; $btn-inverse-text-shadow: 0 1px 0 rgba(255, 255, 255, 0.4); $btn-active-bg: $white; -$btn-active-text-color: $blue-dark; +$btn-active-text-color: $blue; $btn-link-color: $gray-1; @@ -220,7 +219,7 @@ $search-filter-box-bg: $gray-7; // Typeahead $typeahead-shadow: 0 5px 10px 0 $gray-5; $typeahead-selected-bg: lighten($blue, 25%); -$typeahead-selected-color: $blue-dark; +$typeahead-selected-color: $blue; // Dropdowns // ------------------------- @@ -285,7 +284,7 @@ $info-text-color: $blue; $alert-error-bg: linear-gradient(90deg, #d44939, #e04d3d); $alert-success-bg: linear-gradient(90deg, #3aa655, #47b274); $alert-warning-bg: linear-gradient(90deg, #d44939, #e04d3d); -$alert-info-bg: $blue-dark; +$alert-info-bg: $blue; // popover $popover-bg: $page-bg; @@ -293,7 +292,7 @@ $popover-color: $text-color; $popover-border-color: $gray-5; $popover-shadow: 0 0 20px $white; -$popover-help-bg: $blue-dark; +$popover-help-bg: $blue; $popover-help-color: $gray-6; $popover-error-bg: $btn-danger-bg; @@ -310,7 +309,7 @@ $graph-tooltip-bg: $gray-5; $checkboxImageUrl: '../img/checkbox_white.png'; // info box -$info-box-background: linear-gradient(100deg, $blue-dark, darken($blue-dark, 5%)); +$info-box-background: linear-gradient(100deg, $blue, darken($blue, 5%)); $info-box-color: $gray-7; // footer @@ -356,3 +355,6 @@ $diff-json-new: #664e33; $diff-json-changed-fg: $gray-6; $diff-json-changed-num: $gray-4; $diff-json-icon: $gray-4; + +//Submenu +$variable-option-bg: $blue-light; diff --git a/public/sass/components/_query_editor.scss 
b/public/sass/components/_query_editor.scss index 6b2860d57bf9f..9fcfdf719ba5c 100644 --- a/public/sass/components/_query_editor.scss +++ b/public/sass/components/_query_editor.scss @@ -1,11 +1,11 @@ .query-keyword { font-weight: $font-weight-semi-bold; - color: $query-blue; + color: $blue; } .gf-form-disabled { .query-keyword { - color: darken($query-blue, 20%); + color: darken($blue, 20%); } } @@ -63,7 +63,7 @@ } .gf-form-query-letter-cell-letter { font-weight: bold; - color: $query-blue; + color: $blue; } .gf-form-query-letter-cell-ds { color: $text-color-weak; diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss index de8a6e6d72149..119c468292a48 100644 --- a/public/sass/components/_slate_editor.scss +++ b/public/sass/components/_slate_editor.scss @@ -122,7 +122,7 @@ .token.attr-value, .token.keyword, .token.class-name { - color: $query-blue; + color: $blue; } .token.regex, diff --git a/public/sass/components/_submenu.scss b/public/sass/components/_submenu.scss index 0027e0b19999f..1efd275bfadc7 100644 --- a/public/sass/components/_submenu.scss +++ b/public/sass/components/_submenu.scss @@ -138,7 +138,7 @@ .variable-option { &:hover, &.highlighted { - background-color: $blue-dark; + background-color: $variable-option-bg; } } diff --git a/public/sass/components/_timepicker.scss b/public/sass/components/_timepicker.scss index e4d8f4555e066..e12835d31c199 100644 --- a/public/sass/components/_timepicker.scss +++ b/public/sass/components/_timepicker.scss @@ -77,7 +77,7 @@ border: none; color: $text-color; &.active span { - color: $query-blue; + color: $blue; font-weight: bold; } .text-info { From 26aa575cb4208a0fa36fde074bc3f26eb9d3f56e Mon Sep 17 00:00:00 2001 From: yogyrahmawan Date: Sun, 22 Jul 2018 08:04:57 +0700 Subject: [PATCH 006/104] escaping ssl mode on postgres param --- pkg/tsdb/postgres/postgres.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pkg/tsdb/postgres/postgres.go 
b/pkg/tsdb/postgres/postgres.go index fdf09216e5180..5ca333fe63357 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -53,7 +53,11 @@ func generateConnectionString(datasource *models.DataSource) string { } sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full") - u := &url.URL{Scheme: "postgres", User: url.UserPassword(datasource.User, password), Host: datasource.Url, Path: datasource.Database, RawQuery: "sslmode=" + sslmode} + u := &url.URL{Scheme: "postgres", + User: url.UserPassword(datasource.User, password), + Host: datasource.Url, Path: datasource.Database, + RawQuery: "sslmode=" + url.QueryEscape(sslmode)} + return u.String() } From 46e31621b071e36f658788c5b8f9c9ab11ca1aab Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 14:28:17 +0200 Subject: [PATCH 007/104] Add jest file --- .../influxdb/specs/query_ctrl.jest.ts | 211 ++++++++++++++++++ 1 file changed, 211 insertions(+) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 0000000000000..e4dd5b226f415 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,211 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +// import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', function() { + let uiSegmentSrv = { + newPlusButton: () => {}, + }; + + let ctx = { + dataSource: { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }, + }; + + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [{}], + }, + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + 
// beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + // beforeEach(ctx.providePhase()); + + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.target = { target: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + // ctx.ctrl = $controller( + // InfluxQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // target: ctx.target, + // datasource: ctx.datasource, + // } + // ); + // }) + // ); + + beforeEach(() => { + ctx.ctrl = new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + }); + + describe('init', function() { + it('should init tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', function() { + expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[0].key).toBe('asd'); + expect(ctx.ctrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', function() { + expect(ctx.ctrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', function() { + 
expect(ctx.ctrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', function() { + expect(ctx.ctrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', function() { + expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); + expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', function() { + expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); + expect(ctx.ctrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', 
type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', function() { + expect(ctx.ctrl.target.tags.length).toBe(0); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(1); + expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 
'value', type: 'value' }, 6); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); From bb0af52d34b201a960d3ace19a54e1b44be8748b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 14:54:58 +0200 Subject: [PATCH 008/104] Figuring out why it doesn't initialize --- .../app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index e4dd5b226f415..c3b8d3ae20d6a 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -16,8 +16,9 @@ describe('InfluxDBQueryCtrl', function() { }; InfluxQueryCtrl.prototype.panelCtrl = { + target: { target: {} }, panel: { - targets: [{}], + targets: [this.target], }, }; From 816ee82d2695157cbd969f43623ae686b683f08d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:25:59 +0200 Subject: [PATCH 009/104] Add docs about global variables in query template variables --- docs/sources/features/datasources/prometheus.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 4ff0baee1085a..190220fb0f17c 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -75,6 +75,9 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). 
+ +It is possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. + ### Using variables in queries There are two syntaxes: From 70575c8f7816f90b074d7f65226b70e334786958 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:34:03 +0200 Subject: [PATCH 010/104] Add templating docs for --- docs/sources/reference/templating.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index efe9db61e3deb..08a142d363691 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -273,6 +273,9 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias. +### The $__range Variable +Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`. + ## Repeating Panels Template variables can be very useful to dynamically change your queries across a whole dashboard. 
If you want From d9bf89438325c01a0fe5f3205b4cefff25930c40 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Tue, 24 Jul 2018 16:58:48 +0900 Subject: [PATCH 011/104] return 400 if user input error --- pkg/api/metrics.go | 2 +- pkg/tsdb/cloudwatch/cloudwatch.go | 21 +++++++++++++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index c1b8ffe595e37..00ad25ab8c2f9 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -52,7 +52,7 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { if res.Error != nil { res.ErrorString = res.Error.Error() resp.Message = res.ErrorString - statusCode = 500 + statusCode = 400 } } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 38fbac3aa292d..4af73fc2ba9f9 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -17,6 +17,7 @@ import ( "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" @@ -100,7 +101,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - return nil, err + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: err, + } + return result, nil } query.RefId = queryContext.Queries[i].RefId @@ -113,15 +117,21 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo } if query.Id == "" && query.Expression != "" { - return nil, fmt.Errorf("Invalid query: id should be set if using expression") + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: fmt.Errorf("Invalid query: id should be set if using expression"), + } + return result, nil } eg.Go(func() error { queryRes, err := e.executeQuery(ectx, query, queryContext) - if err != nil { + if ae, ok 
:= err.(awserr.Error); ok && ae.Code() == "500" { return err } result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } return nil }) } @@ -131,11 +141,14 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo q := getMetricDataQuery eg.Go(func() error { queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) - if err != nil { + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { return err } for _, queryRes := range queryResponses { result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } } return nil }) From 59c17053990203e6f303b5dfbdb3aa4b20611e75 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Tue, 24 Jul 2018 10:34:11 +0200 Subject: [PATCH 012/104] docs: mentation that config changes requires restart. --- docs/sources/installation/configuration.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index e3db7a1d60b31..2a799b044b300 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -15,6 +15,8 @@ weight = 1 The Grafana back-end has a number of configuration options that can be specified in a `.ini` configuration file or specified using environment variables. +> **Note.** Grafana needs to be restarted for any configuration changes to take effect. + ## Comments In .ini Files Semicolons (the `;` char) are the standard way to comment out lines in a `.ini` file. 
From 93e73919e814b6d583aa1f3666c22cf922faaa55 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 11:03:46 +0200 Subject: [PATCH 013/104] fix code style --- pkg/tsdb/postgres/postgres.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index 5ca333fe63357..f19e4fb54f4e7 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -53,10 +53,12 @@ func generateConnectionString(datasource *models.DataSource) string { } sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full") - u := &url.URL{Scheme: "postgres", - User: url.UserPassword(datasource.User, password), - Host: datasource.Url, Path: datasource.Database, - RawQuery: "sslmode=" + url.QueryEscape(sslmode)} + u := &url.URL{ + Scheme: "postgres", + User: url.UserPassword(datasource.User, password), + Host: datasource.Url, Path: datasource.Database, + RawQuery: "sslmode=" + url.QueryEscape(sslmode), + } return u.String() } From 35efb7c225ae35758ab1826e7ad0012f5ddf46a8 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 11:26:09 +0200 Subject: [PATCH 014/104] changelog: add notes about closing #12644 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 58570c89c1866..160aab9b91a3b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) 
[#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) From 81c32780b905fa92ab874e4fac86395f0155f14a Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 11:27:53 +0200 Subject: [PATCH 015/104] Pass more tests --- .../plugins/datasource/influxdb/query_ctrl.ts | 1 - .../influxdb/specs/query_ctrl.jest.ts | 110 ++++++++++-------- 2 files changed, 60 insertions(+), 51 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f4589e..2be1ecc7bff1d 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -22,7 +22,6 @@ export class InfluxQueryCtrl extends QueryCtrl { /** @ngInject **/ constructor($scope, $injector, private templateSrv, private $q, private uiSegmentSrv) { super($scope, $injector); - this.target = this.target; this.queryModel = new InfluxQuery(this.target, templateSrv, this.panel.scopedVars); this.queryBuilder = new InfluxQueryBuilder(this.target, this.datasource.database); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index c3b8d3ae20d6a..139efbc3afab7 100644 
--- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -4,29 +4,28 @@ import 'app/core/services/segment_srv'; // import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; -describe('InfluxDBQueryCtrl', function() { +describe('InfluxDBQueryCtrl', () => { let uiSegmentSrv = { newPlusButton: () => {}, - }; - - let ctx = { - dataSource: { - metricFindQuery: jest.fn(() => Promise.resolve([])), + newKey: key => key, + newKeyValue: key => key, + newSegment: seg => seg, + newSelectMeasurement: () => { + return { value: 'select measurement' }; }, + newOperator: op => op, + newFake: () => {}, }; - InfluxQueryCtrl.prototype.panelCtrl = { - target: { target: {} }, - panel: { - targets: [this.target], - }, + let ctx = { + dataSource: {}, }; // beforeEach(angularMocks.module('grafana.core')); // beforeEach(angularMocks.module('grafana.controllers')); // beforeEach(angularMocks.module('grafana.services')); // beforeEach( - // angularMocks.module(function($compileProvider) { + // angularMocks.module(($ =>compileProvider) { // $compileProvider.preAssignBindingsEnabled(true); // }) // ); @@ -56,147 +55,158 @@ describe('InfluxDBQueryCtrl', function() { // }) // ); - beforeEach(() => { - ctx.ctrl = new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + beforeEach(async () => { + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }; + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [InfluxQueryCtrl.target], + }, + }; + + InfluxQueryCtrl.prototype.target = { target: {} }; + console.log('creating new instance'); + ctx.ctrl = await new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); }); - describe('init', function() { - it('should init tagSegments', function() { + describe('init', () => { + it('should init tagSegments', () => { expect(ctx.ctrl.tagSegments.length).toBe(1); }); - it('should init 
measurementSegment', function() { + it('should init measurementSegment', () => { expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); }); }); - describe('when first tag segment is updated', function() { - beforeEach(function() { + describe('when first tag segment is updated', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); }); - it('should update tag key', function() { + it('should update tag key', () => { expect(ctx.ctrl.target.tags[0].key).toBe('asd'); expect(ctx.ctrl.tagSegments[0].type).toBe('key'); }); - it('should add tagSegments', function() { + it('should add tagSegments', () => { expect(ctx.ctrl.tagSegments.length).toBe(3); }); }); - describe('when last tag value segment is updated', function() { - beforeEach(function() { + describe('when last tag value segment is updated', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); }); - it('should update tag value', function() { + it('should update tag value', () => { expect(ctx.ctrl.target.tags[0].value).toBe('server1'); }); - it('should set tag operator', function() { + it('should set tag operator', () => { expect(ctx.ctrl.target.tags[0].operator).toBe('='); }); - it('should add plus button for another filter', function() { + it('should add plus button for another filter', () => { expect(ctx.ctrl.tagSegments[3].fake).toBe(true); }); }); - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { + describe('when last tag value segment is updated to regex', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); }); - it('should update operator', function() { + it('should update operator', () => { expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); 
expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); }); }); - describe('when second tag key is added', function() { - beforeEach(function() { + describe('when second tag key is added', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); }); - it('should update tag key', function() { + it('should update tag key', () => { expect(ctx.ctrl.target.tags[1].key).toBe('key2'); }); - it('should add AND segment', function() { + it('should add AND segment', () => { expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); }); }); - describe('when condition is changed', function() { - beforeEach(function() { + describe('when condition is changed', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); }); - it('should update tag condition', function() { + it('should update tag condition', () => { expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); }); - it('should update AND segment', function() { + it('should update AND segment', () => { expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); expect(ctx.ctrl.tagSegments.length).toBe(7); }); }); - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); }); - it('should remove tags', function() { + it('should remove tags', () => { 
expect(ctx.ctrl.target.tags.length).toBe(0); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(1); expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); }); }); - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); }); - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); }); - describe('when deleting second tag value after second tag filter is complete', function() { - 
beforeEach(function() { + describe('when deleting second tag value after second tag filter is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); @@ -204,7 +214,7 @@ describe('InfluxDBQueryCtrl', function() { ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); From 987a16086bbafeccf3c07a5099e5b3ddf914102b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:34:37 +0200 Subject: [PATCH 016/104] Karma to Jest --- .../influxdb/specs/query_ctrl.jest.ts | 72 ++++--------------- 1 file changed, 15 insertions(+), 57 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index 139efbc3afab7..6b929432dfa8d 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -1,73 +1,31 @@ import '../query_ctrl'; -import 'app/core/services/segment_srv'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; // import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; // import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; describe('InfluxDBQueryCtrl', () => { - let uiSegmentSrv = { - newPlusButton: () => {}, - newKey: key => key, - newKeyValue: key => key, - newSegment: seg => seg, - newSelectMeasurement: () => { - return { value: 'select measurement' }; - }, - newOperator: op => op, - newFake: () => {}, - }; - - let ctx = { - 
dataSource: {}, - }; - - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(($ =>compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - // beforeEach(ctx.providePhase()); - - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.target = { target: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - // ctx.ctrl = $controller( - // InfluxQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // target: ctx.target, - // datasource: ctx.datasource, - // } - // ); - // }) - // ); - - beforeEach(async () => { + let ctx = {}; + + beforeEach(() => { InfluxQueryCtrl.prototype.datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), + metricFindQuery: () => Promise.resolve([]), }; + InfluxQueryCtrl.prototype.target = { target: {} }; InfluxQueryCtrl.prototype.panelCtrl = { panel: { - targets: [InfluxQueryCtrl.target], + targets: [InfluxQueryCtrl.prototype.target], }, + refresh: () => {}, }; - InfluxQueryCtrl.prototype.target = { target: {} }; - console.log('creating new instance'); - ctx.ctrl = await new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + ctx.ctrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }) + ); }); describe('init', () => { From 48ae9ec77ebbc5e3b1546a795af1f8fded555ff4 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:35:37 +0200 Subject: [PATCH 017/104] Remove comments and Karm test --- .../influxdb/specs/query_ctrl.jest.ts | 2 - .../influxdb/specs/query_ctrl_specs.ts | 193 
------------------ 2 files changed, 195 deletions(-) delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index 6b929432dfa8d..4e3fc47a5fdeb 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -1,7 +1,5 @@ import '../query_ctrl'; import { uiSegmentSrv } from 'app/core/services/segment_srv'; -// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -// import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; describe('InfluxDBQueryCtrl', () => { diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d38..0000000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - 
ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - 
describe('when second tag key is added', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - 
ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); From c0f9c06f2163dc57424257b204e6c6c449aa0212 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 29 Jun 2018 13:37:21 +0200 Subject: [PATCH 018/104] Karma to Jest: completer --- .../{completer_specs.ts => completer.jest.ts} | 70 +++++++++---------- 1 file changed, 34 
insertions(+), 36 deletions(-) rename public/app/plugins/datasource/prometheus/specs/{completer_specs.ts => completer.jest.ts} (79%) diff --git a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts similarity index 79% rename from public/app/plugins/datasource/prometheus/specs/completer_specs.ts rename to public/app/plugins/datasource/prometheus/specs/completer.jest.ts index 846948340898f..cb8dd8e5bd692 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,47 +1,45 @@ -import { describe, it, sinon, expect } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; +//import { describe, it, sinon, expect } from 'test/lib/common'; +//import helpers from 'test/specs/helpers'; import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; +import { BackendSrv } from 'app/core/services/backend_srv'; +jest.mock('../datasource'); +jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - var ctx = new helpers.ServiceTestContext(); - beforeEach(ctx.providePhase(['templateSrv'])); + //beforeEach(ctx.providePhase(['templateSrv'])); function getSessionStub(data) { return { - getTokenAt: sinon.stub().returns(data.currentToken), - getTokens: sinon.stub().returns(data.tokens), - getLine: sinon.stub().returns(data.line), + getTokenAt:jest.fn(()=> (data.currentToken)), + getTokens:jest.fn(()=> (data.tokens)), + getLine:jest.fn(()=> (data.line)), }; } let editor = {}; - let datasourceStub = { - performInstantQuery: sinon - .stub() - .withArgs({ expr: '{__name__="node_cpu"' }) - .returns( - Promise.resolve({ - data: { + + let backendSrv = {} + let datasourceStub = new PrometheusDatasource({},{},backendSrv,{},{}); + + datasourceStub.performInstantQuery = jest.fn(() => Promise.resolve({ data: { - result: [ - { - metric: 
{ - job: 'node', - instance: 'localhost:9100', + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', + }, }, - }, - ], + ], + }, }, - }, - }) - ), - performSuggestQuery: sinon - .stub() - .withArgs('node', true) - .returns(Promise.resolve(['node_cpu'])), - }; + }) + ); + datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); + let templateSrv = { variables: [ @@ -62,9 +60,9 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, '[', (s, res) => { - expect(res[0].caption).to.eql('$__interval'); - expect(res[0].value).to.eql('[$__interval'); - expect(res[0].meta).to.eql('range vector'); + expect(res[0].caption).toEqual('$__interval'); + expect(res[0].value).toEqual('[$__interval'); + expect(res[0].meta).toEqual('range vector'); }); }); }); @@ -93,7 +91,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -125,7 +123,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -156,7 +154,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 15 }, 'n', (s, res) => { - expect(res[0].meta).to.eql('label value'); + expect(res[0].meta).toEqual('label value'); }); }); }); @@ -192,7 +190,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'm', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); From 
49a8c2e0c138118f4e1bc3bfa37446eba596b98c Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 29 Jun 2018 13:44:11 +0200 Subject: [PATCH 019/104] Make beautiful --- .../prometheus/specs/completer.jest.ts | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index cb8dd8e5bd692..b401cb9bf657b 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -12,35 +12,35 @@ describe('Prometheus editor completer', function() { function getSessionStub(data) { return { - getTokenAt:jest.fn(()=> (data.currentToken)), - getTokens:jest.fn(()=> (data.tokens)), - getLine:jest.fn(()=> (data.line)), + getTokenAt: jest.fn(() => data.currentToken), + getTokens: jest.fn(() => data.tokens), + getLine: jest.fn(() => data.line), }; } let editor = {}; - let backendSrv = {} - let datasourceStub = new PrometheusDatasource({},{},backendSrv,{},{}); - - datasourceStub.performInstantQuery = jest.fn(() => Promise.resolve({ - data: { - data: { - result: [ - { - metric: { - job: 'node', - instance: 'localhost:9100', - }, - }, - ], + let backendSrv = {}; + let datasourceStub = new PrometheusDatasource({}, {}, backendSrv, {}, {}); + + datasourceStub.performInstantQuery = jest.fn(() => + Promise.resolve({ + data: { + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', }, }, - }) - ); + ], + }, + }, + }) + ); datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); - let templateSrv = { variables: [ { From d2f81d52d4b121cbc0bc6c39527900a4c5cf2042 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 09:43:34 +0200 Subject: [PATCH 020/104] Karma to Jest: begin influx query_ctrl --- .../influxdb/specs/query_ctrl.jest.ts | 222 ++++++++++++++++++ 
.../influxdb/specs/query_ctrl_specs.ts | 193 --------------- 2 files changed, 222 insertions(+), 193 deletions(-) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 0000000000000..dd6c9b4fa1892 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,222 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +//import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', () => { + //var ctx = new helpers.ControllerTestContext(); + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(($ =>compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + // beforeEach(ctx.providePhase()); + + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.target = { target: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + // influxQueryCtrl = $controller( + // InfluxQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // target: ctx.target, + // datasource: ctx.datasource, + // } + // ); + // }) + // ); + + InfluxQueryCtrl.prototype.target = { target: {} }; + 
InfluxQueryCtrl.prototype.panelCtrl = { + refresh: jest.fn(), + panel: { + targets: InfluxQueryCtrl.prototype.target, + }, + }; + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }; + + // let uiSegmentSrv = { + // newPlusButton: jest.fn(), + // newSegment: jest.fn(), + // newSelectMeasurement: jest.fn() + // }; + let influxQueryCtrl; + + beforeEach(() => { + influxQueryCtrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) + ); + }); + describe('init', () => { + it('should init tagSegments', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', () => { + expect(influxQueryCtrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', () => { + expect(influxQueryCtrl.target.tags[0].key).toBe('asd'); + expect(influxQueryCtrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', () => { + console.log(influxQueryCtrl.tagSegments); + expect(influxQueryCtrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', () => { + expect(influxQueryCtrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', () => { + expect(influxQueryCtrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', () => { + expect(influxQueryCtrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', () => { + 
beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', () => { + expect(influxQueryCtrl.tagSegments[1].value).toBe('=~'); + expect(influxQueryCtrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', () => { + expect(influxQueryCtrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', () => { + expect(influxQueryCtrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', () => { + expect(influxQueryCtrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', () => { + expect(influxQueryCtrl.tagSegments[3].value).toBe('OR'); + expect(influxQueryCtrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', () => { + expect(influxQueryCtrl.target.tags.length).toBe(0); + 
}); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(1); + expect(influxQueryCtrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + 
influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d38..0000000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - 
expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - describe('when second tag key is added', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - 
expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value 
before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); From d6381bed7cebe7c0270bf0ddacc8333e17fb9658 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 14:34:58 +0200 Subject: [PATCH 021/104] Test fail depending on test order --- .../plugins/datasource/influxdb/query_ctrl.ts | 2 +- .../influxdb/specs/query_ctrl.jest.ts | 4 +- .../influxdb/specs/query_ctrl_specs.ts | 195 ++++++++++++++++++ 3 files changed, 198 insertions(+), 3 deletions(-) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f4589e..1744971114331 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ 
b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -338,7 +338,7 @@ export class InfluxQueryCtrl extends QueryCtrl { this.tagSegments.push(this.uiSegmentSrv.newPlusButton()); } } - + console.log(this.tagSegments); this.rebuildTargetTagConditions(); } diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index dd6c9b4fa1892..0c1ed3ed6b20c 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -46,7 +46,7 @@ describe('InfluxDBQueryCtrl', () => { InfluxQueryCtrl.prototype.panelCtrl = { refresh: jest.fn(), panel: { - targets: InfluxQueryCtrl.prototype.target, + targets: [InfluxQueryCtrl.prototype.target], }, }; InfluxQueryCtrl.prototype.datasource = { @@ -69,6 +69,7 @@ describe('InfluxDBQueryCtrl', () => { new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) ); }); + describe('init', () => { it('should init tagSegments', () => { expect(influxQueryCtrl.tagSegments.length).toBe(1); @@ -90,7 +91,6 @@ describe('InfluxDBQueryCtrl', () => { }); it('should add tagSegments', () => { - console.log(influxQueryCtrl.tagSegments); expect(influxQueryCtrl.tagSegments.length).toBe(3); }); }); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts new file mode 100644 index 0000000000000..151dd7ab0c6d4 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts @@ -0,0 +1,195 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', function() { + var ctx = new helpers.ControllerTestContext(); + + 
beforeEach(angularMocks.module('grafana.core')); + beforeEach(angularMocks.module('grafana.controllers')); + beforeEach(angularMocks.module('grafana.services')); + beforeEach( + angularMocks.module(function($compileProvider) { + $compileProvider.preAssignBindingsEnabled(true); + }) + ); + beforeEach(ctx.providePhase()); + + beforeEach( + angularMocks.inject(($rootScope, $controller, $q) => { + ctx.$q = $q; + ctx.scope = $rootScope.$new(); + ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + ctx.target = { target: {} }; + ctx.panelCtrl = { + panel: { + targets: [ctx.target], + }, + }; + ctx.panelCtrl.refresh = sinon.spy(); + ctx.ctrl = $controller( + InfluxQueryCtrl, + { $scope: ctx.scope }, + { + panelCtrl: ctx.panelCtrl, + target: ctx.target, + datasource: ctx.datasource, + } + ); + }) + ); + + describe('init', function() { + it('should init tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).to.be(1); + }); + + it('should init measurementSegment', function() { + expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); + }); + }); + + describe('when first tag segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', function() { + console.log(ctx.ctrl.target.tags); + expect(ctx.ctrl.target.tags[0].key).to.be('asd'); + expect(ctx.ctrl.tagSegments[0].type).to.be('key'); + }); + + it('should add tagSegments', function() { + console.log(ctx.ctrl.tagSegments); + expect(ctx.ctrl.tagSegments.length).to.be(3); + }); + }); + + describe('when last tag value segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', function() { + expect(ctx.ctrl.target.tags[0].value).to.be('server1'); + }); + + it('should set tag operator', function() 
{ + expect(ctx.ctrl.target.tags[0].operator).to.be('='); + }); + + it('should add plus button for another filter', function() { + expect(ctx.ctrl.tagSegments[3].fake).to.be(true); + }); + }); + + describe('when last tag value segment is updated to regex', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', function() { + expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); + expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); + }); + }); + + describe('when second tag key is added', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[1].key).to.be('key2'); + }); + + it('should add AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); + }); + }); + + describe('when condition is changed', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', function() { + expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); + }); + + it('should update AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); + expect(ctx.ctrl.tagSegments.length).to.be(7); + }); + }); + + describe('when deleting first tag filter after value is selected', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ 
value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', function() { + expect(ctx.ctrl.target.tags.length).to.be(0); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(1); + expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + 
ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); +}); From 51caf470f50c07fdb7f6d47d7fe022f2ebfc1ac5 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:55:54 +0200 Subject: [PATCH 022/104] Remove influx qeury_ctrl jest, as it is already completed --- .../influxdb/specs/query_ctrl.jest.ts | 222 ------------------ .../prometheus/specs/completer.jest.ts | 3 - 2 files changed, 225 deletions(-) delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts deleted file mode 100644 index 0c1ed3ed6b20c..0000000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ /dev/null @@ -1,222 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { uiSegmentSrv } from 'app/core/services/segment_srv'; -//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -//import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', () => { - //var ctx = new helpers.ControllerTestContext(); - - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(($ =>compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - // beforeEach(ctx.providePhase()); - - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // 
ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.target = { target: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - // influxQueryCtrl = $controller( - // InfluxQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // target: ctx.target, - // datasource: ctx.datasource, - // } - // ); - // }) - // ); - - InfluxQueryCtrl.prototype.target = { target: {} }; - InfluxQueryCtrl.prototype.panelCtrl = { - refresh: jest.fn(), - panel: { - targets: [InfluxQueryCtrl.prototype.target], - }, - }; - InfluxQueryCtrl.prototype.datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), - }; - - // let uiSegmentSrv = { - // newPlusButton: jest.fn(), - // newSegment: jest.fn(), - // newSelectMeasurement: jest.fn() - // }; - let influxQueryCtrl; - - beforeEach(() => { - influxQueryCtrl = new InfluxQueryCtrl( - {}, - {}, - {}, - {}, - new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) - ); - }); - - describe('init', () => { - it('should init tagSegments', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(1); - }); - - it('should init measurementSegment', () => { - expect(influxQueryCtrl.measurementSegment.value).toBe('select measurement'); - }); - }); - - describe('when first tag segment is updated', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', () => { - expect(influxQueryCtrl.target.tags[0].key).toBe('asd'); - expect(influxQueryCtrl.tagSegments[0].type).toBe('key'); - }); - - it('should add tagSegments', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(3); - }); - }); - - describe('when last tag value segment is updated', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', 
type: 'value' }, 2); - }); - - it('should update tag value', () => { - expect(influxQueryCtrl.target.tags[0].value).toBe('server1'); - }); - - it('should set tag operator', () => { - expect(influxQueryCtrl.target.tags[0].operator).toBe('='); - }); - - it('should add plus button for another filter', () => { - expect(influxQueryCtrl.tagSegments[3].fake).toBe(true); - }); - }); - - describe('when last tag value segment is updated to regex', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', () => { - expect(influxQueryCtrl.tagSegments[1].value).toBe('=~'); - expect(influxQueryCtrl.target.tags[0].operator).toBe('=~'); - }); - }); - - describe('when second tag key is added', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', () => { - expect(influxQueryCtrl.target.tags[1].key).toBe('key2'); - }); - - it('should add AND segment', () => { - expect(influxQueryCtrl.tagSegments[3].value).toBe('AND'); - }); - }); - - describe('when condition is changed', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', () => { - expect(influxQueryCtrl.target.tags[1].condition).toBe('OR'); - }); - - it('should update AND segment', () => { - expect(influxQueryCtrl.tagSegments[3].value).toBe('OR'); - 
expect(influxQueryCtrl.tagSegments.length).toBe(7); - }); - }); - - describe('when deleting first tag filter after value is selected', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', () => { - expect(influxQueryCtrl.target.tags.length).toBe(0); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(1); - expect(influxQueryCtrl.tagSegments[0].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - 
expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); -}); diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index b401cb9bf657b..fbe2dce0ce50a 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,6 +1,3 @@ -//import { describe, it, sinon, expect } from 'test/lib/common'; -//import helpers from 'test/specs/helpers'; - import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; import { BackendSrv } from 'app/core/services/backend_srv'; From b81621b6f5019e12893fdddde32b7850aabbad61 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:24:44 +0200 Subject: [PATCH 023/104] changelog: add notes about closing #12636 #9827 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 160aab9b91a3b..4917c5998d051 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,6 +28,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard 
links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) +* **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) # 5.2.1 (2018-06-29) From 3dab4e1b52c1a4e7712abd5c20da14a4736b8ca4 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:27:13 +0200 Subject: [PATCH 024/104] changelog: add notes about closing #12589 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4917c5998d051..826507e1bd65c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) +* **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) # 5.2.1 (2018-06-29) From 25c8233523d317a378f628258b86d88686b1a744 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 4 Jul 2018 09:22:39 +0200 Subject: [PATCH 025/104] Begin conversion --- ...query_ctrl_specs.ts => query_ctrl.jest.ts} | 95 +++++++++++-------- 1 file changed, 53 insertions(+), 42 deletions(-) rename public/app/plugins/datasource/graphite/specs/{query_ctrl_specs.ts => query_ctrl.jest.ts} (84%) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts similarity index 84% rename from public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts rename to public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 
b4f7718930f3d..776dec0a1a782 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -6,48 +6,59 @@ import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; describe('GraphiteQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - ctx.datasource.getFuncDef = gfunc.getFuncDef; - ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - ctx.panelCtrl = { panel: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - - ctx.ctrl = $controller( - GraphiteQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - datasource: ctx.datasource, - target: ctx.target, - } - ); - ctx.scope.$digest(); - }) - ); + + let datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + + }; + let ctx = { + + }; + + // beforeEach(angularMocks.module('grafana.core')); + // 
beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + //beforeEach(ctx.providePhase()); + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); + // ctx.datasource.getFuncDef = gfunc.getFuncDef; + // ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); + // ctx.datasource.createFuncInstance = gfunc.createFuncInstance; + // ctx.panelCtrl = { panel: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + + // ctx.ctrl = $controller( + // GraphiteQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // datasource: ctx.datasource, + // target: ctx.target, + // } + // ); + // ctx.scope.$digest(); + // }) + // ); describe('init', function() { it('should validate metric key exists', function() { From b58a7642dc6b3be313a30be95b455fd6141f8da9 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 15:39:56 +0200 Subject: [PATCH 026/104] Karma to Jest --- .../graphite/specs/query_ctrl.jest.ts | 271 ++++++++++-------- 1 file changed, 145 insertions(+), 126 deletions(-) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 776dec0a1a782..58cefeef6f6ae 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -1,22 +1,27 @@ -import 
'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; import gfunc from '../gfunc'; -import helpers from 'test/specs/helpers'; +// import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; -describe('GraphiteQueryCtrl', function() { - - let datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), - getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), - getFuncDef: gfunc.getFuncDef, - waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), - createFuncInstance: gfunc.createFuncInstance, - +describe('GraphiteQueryCtrl', () => { + let ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + }, + target: { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }, + panelCtrl: { + refresh: jest.fn(), + }, }; - let ctx = { + ctx.panelCtrl.panel = { + targets: [ctx.target], }; // beforeEach(angularMocks.module('grafana.core')); @@ -60,156 +65,170 @@ describe('GraphiteQueryCtrl', function() { // }) // ); - describe('init', function() { - it('should validate metric key exists', function() { - expect(ctx.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*'); + beforeEach(() => { + GraphiteQueryCtrl.prototype.target = ctx.target; + GraphiteQueryCtrl.prototype.datasource = ctx.datasource; + + GraphiteQueryCtrl.prototype.panelCtrl = ctx.panelCtrl; + + ctx.ctrl = new GraphiteQueryCtrl( + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }), + {}, + {} + ); + }); + + describe('init', () => { + 
it('should validate metric key exists', () => { + expect(ctx.datasource.metricFindQuery.mock.calls[0][0]).toBe('test.prod.*'); }); - it('should delete last segment if no metrics are found', function() { - expect(ctx.ctrl.segments[2].value).to.be('select metric'); + it('should delete last segment if no metrics are found', () => { + expect(ctx.ctrl.segments[2].value).toBe('select metric'); }); - it('should parse expression and build function model', function() { - expect(ctx.ctrl.queryModel.functions.length).to.be(2); + it('should parse expression and build function model', () => { + expect(ctx.ctrl.queryModel.functions.length).toBe(2); }); }); - describe('when adding function', function() { - beforeEach(function() { + describe('when adding function', () => { + beforeEach(() => { ctx.ctrl.target.target = 'test.prod.*.count'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode')); }); - it('should add function with correct node number', function() { - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(2); + it('should add function with correct node number', () => { + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(2); }); - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(test.prod.*.count, 2)'); + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(test.prod.*.count, 2)'); }); - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when adding function before any metric segment', function() { - beforeEach(function() { + describe('when adding function before any metric segment', () => { + beforeEach(() => { ctx.ctrl.target.target 
= ''; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([{ expandable: true }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: true }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent')); }); - it('should add function and remove select metric link', function() { - expect(ctx.ctrl.segments.length).to.be(0); + it('should add function and remove select metric link', () => { + expect(ctx.ctrl.segments.length).toBe(0); }); }); - describe('when initializing target without metric expression and only function', function() { - beforeEach(function() { + describe('when initializing target without metric expression and only function', () => { + beforeEach(() => { ctx.ctrl.target.target = 'asPercent(#A, #B)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); - ctx.scope.$digest(); }); - it('should not add select metric segment', function() { - expect(ctx.ctrl.segments.length).to.be(1); + it('should not add select metric segment', () => { + expect(ctx.ctrl.segments.length).toBe(1); }); - it('should add second series ref as param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should add second series ref as param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when initializing a target with single param func using variable', function() { - beforeEach(function() { + describe('when initializing a target with single param func using variable', () => { + beforeEach(() => { ctx.ctrl.target.target = 'movingAverage(prod.count, $var)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); }); - it('should add 2 segments', function() { - expect(ctx.ctrl.segments.length).to.be(2); + it('should add 2 segments', () => { + 
expect(ctx.ctrl.segments.length).toBe(2); }); - it('should add function param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should add function param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when initializing target without metric expression and function with series-ref', function() { - beforeEach(function() { + describe('when initializing target without metric expression and function with series-ref', () => { + beforeEach(() => { ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); }); - it('should add segments', function() { - expect(ctx.ctrl.segments.length).to.be(3); + it('should add segments', () => { + expect(ctx.ctrl.segments.length).toBe(3); }); - it('should have correct func params', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should have correct func params', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when getting altSegments and metricFindQuery returns empty array', function() { - beforeEach(function() { + describe('when getting altSegments and metricFindQuery returns empty array', () => { + beforeEach(() => { ctx.ctrl.target.target = 'test.count'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); ctx.ctrl.getAltSegments(1).then(function(results) { ctx.altSegments = results; }); - ctx.scope.$digest(); }); - it('should have no segments', function() { - expect(ctx.altSegments.length).to.be(0); + it('should have no segments', () => { + expect(ctx.altSegments.length).toBe(0); }); }); - describe('targetChanged', function() { - beforeEach(function() { - ctx.ctrl.datasource.metricFindQuery = 
sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + describe('targetChanged', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.target.target = ''; ctx.ctrl.targetChanged(); }); - it('should rebuld target after expression model', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); + it('should rebuild target after expression model', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); }); - it('should call panelCtrl.refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call panelCtrl.refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when updating targets with nested query', function() { - beforeEach(function() { + describe('when updating targets with nested query', () => { + beforeEach(() => { ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); }); - it('should add function params', function() { - expect(ctx.ctrl.queryModel.segments.length).to.be(1); - expect(ctx.ctrl.queryModel.segments[0].value).to.be('#A'); + it('should add function params', () => { + expect(ctx.ctrl.queryModel.segments.length).toBe(1); + expect(ctx.ctrl.queryModel.segments[0].value).toBe('#A'); - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(60); + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(60); }); - it('target should remain the same', function() { - 
expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); + it('target should remain the same', () => { + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); }); - it('targetFull should include nested queries', function() { + it('targetFull should include nested queries', () => { ctx.ctrl.panelCtrl.panel.targets = [ { target: 'nested.query.count', @@ -219,17 +238,17 @@ describe('GraphiteQueryCtrl', function() { ctx.ctrl.updateModelTarget(); - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); - expect(ctx.ctrl.target.targetFull).to.be('scaleToSeconds(nested.query.count, 60)'); + expect(ctx.ctrl.target.targetFull).toBe('scaleToSeconds(nested.query.count, 60)'); }); }); - describe('when updating target used in other query', function() { - beforeEach(function() { + describe('when updating target used in other query', () => { + beforeEach(() => { ctx.ctrl.target.target = 'metrics.a.count'; ctx.ctrl.target.refId = 'A'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }]; @@ -237,113 +256,113 @@ describe('GraphiteQueryCtrl', function() { ctx.ctrl.updateModelTarget(); }); - it('targetFull of other query should update', function() { - expect(ctx.ctrl.panel.targets[1].targetFull).to.be('sumSeries(metrics.a.count)'); + it('targetFull of other query should update', () => { + expect(ctx.ctrl.panel.targets[1].targetFull).toBe('sumSeries(metrics.a.count)'); }); }); - describe('when adding seriesByTag function', function() { - beforeEach(function() { + describe('when adding seriesByTag function', () => { + beforeEach(() => { ctx.ctrl.target.target = ''; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ 
expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag')); }); - it('should update functions', function() { - expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).to.be(0); + it('should update functions', () => { + expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).toBe(0); }); - it('should update seriesByTagUsed flag', function() { - expect(ctx.ctrl.queryModel.seriesByTagUsed).to.be(true); + it('should update seriesByTagUsed flag', () => { + expect(ctx.ctrl.queryModel.seriesByTagUsed).toBe(true); }); - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('seriesByTag()'); + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('seriesByTag()'); }); - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when parsing seriesByTag function', function() { - beforeEach(function() { + describe('when parsing seriesByTag function', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); }); - it('should add tags', function() { + it('should add tags', () => { const expected = [ { key: 'tag1', operator: '=', value: 'value1' }, { key: 'tag2', operator: '!=~', value: 'value2' }, ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should add plus button', function() { - expect(ctx.ctrl.addTagSegments.length).to.be(1); + it('should add plus button', () => { + expect(ctx.ctrl.addTagSegments.length).toBe(1); }); }); - describe('when tag 
added', function() { - beforeEach(function() { + describe('when tag added', () => { + beforeEach(() => { ctx.ctrl.target.target = 'seriesByTag()'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addNewTag({ value: 'tag1' }); }); - it('should update tags with default value', function() { + it('should update tags with default value', () => { const expected = [{ key: 'tag1', operator: '=', value: '' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag1=')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); - describe('when tag changed', function() { - beforeEach(function() { + describe('when tag changed', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0); }); - it('should update tags', function() { + it('should update tags', () => { const expected = [ { key: 'tag1', operator: '=', value: 'new_value' }, { key: 'tag2', operator: '!=~', value: 'value2' }, ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); - describe('when 
tag removed', function() { - beforeEach(function() { + describe('when tag removed', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.removeTag(0); }); - it('should update tags', function() { + it('should update tags', () => { const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); }); From 1c691ac855142222dc4549a613d52a1171487e1d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:51:34 +0200 Subject: [PATCH 027/104] changelog: add notes about closing #12533 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 826507e1bd65c..0f3fb6b9d01c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) +* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) # 5.2.1 (2018-06-29) From a63fca03b87193c87d6154628254998a06cf434d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 
15:57:07 +0200 Subject: [PATCH 028/104] changelog: add notes about closing #12551 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f3fb6b9d01c5..6a7d2db1c14e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) +* **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) From 5de8b6c2f01cdfa0505f93e6469a38702fdd66fa Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 16:45:36 +0200 Subject: [PATCH 029/104] changelog: add notes about closing #12489 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a7d2db1c14e4..aa794b92164e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) +* **Cloudwatch**: Improved error 
handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) # 5.2.2 (unreleased) From 27c081349fb11f1ad8d304873aa9cc92a45a2027 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 17:03:58 +0200 Subject: [PATCH 030/104] Remove old influx stuff --- public/app/plugins/datasource/influxdb/query_ctrl.ts | 2 +- .../app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index 1744971114331..ce669c9f4589e 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -338,7 +338,7 @@ export class InfluxQueryCtrl extends QueryCtrl { this.tagSegments.push(this.uiSegmentSrv.newPlusButton()); } } - console.log(this.tagSegments); + this.rebuildTargetTagConditions(); } diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts index 151dd7ab0c6d4..4daa48d6b9d38 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts @@ -57,13 +57,11 @@ describe('InfluxDBQueryCtrl', function() { }); it('should update tag key', function() { - console.log(ctx.ctrl.target.tags); expect(ctx.ctrl.target.tags[0].key).to.be('asd'); expect(ctx.ctrl.tagSegments[0].type).to.be('key'); }); it('should add tagSegments', function() { - console.log(ctx.ctrl.tagSegments); expect(ctx.ctrl.tagSegments.length).to.be(3); }); }); From d8d748d2aa9987e93e6b8988b66d2d217be98ac0 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 17:40:00 +0200 Subject: [PATCH 031/104] remove unneeded comment --- .../app/plugins/datasource/prometheus/specs/completer.jest.ts | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index fbe2dce0ce50a..b29e4d272337a 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -5,8 +5,6 @@ jest.mock('../datasource'); jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - //beforeEach(ctx.providePhase(['templateSrv'])); - function getSessionStub(data) { return { getTokenAt: jest.fn(() => data.currentToken), From ce9b25a5ac66f0f6a8b9a2f1c91b14c184ed9143 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 18:30:29 +0200 Subject: [PATCH 032/104] Remove comments --- .../graphite/specs/query_ctrl.jest.ts | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 58cefeef6f6ae..b38ad56427bdd 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -1,8 +1,5 @@ import { uiSegmentSrv } from 'app/core/services/segment_srv'; -// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import gfunc from '../gfunc'; -// import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; describe('GraphiteQueryCtrl', () => { @@ -24,47 +21,6 @@ describe('GraphiteQueryCtrl', () => { targets: [ctx.target], }; - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - //beforeEach(ctx.providePhase()); - // beforeEach( - // 
angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - // ctx.datasource.getFuncDef = gfunc.getFuncDef; - // ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - // ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - // ctx.panelCtrl = { panel: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - - // ctx.ctrl = $controller( - // GraphiteQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // datasource: ctx.datasource, - // target: ctx.target, - // } - // ); - // ctx.scope.$digest(); - // }) - // ); - beforeEach(() => { GraphiteQueryCtrl.prototype.target = ctx.target; GraphiteQueryCtrl.prototype.datasource = ctx.datasource; From 1dd9646a502c8f0749ed1752b25f39111677effb Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 19:05:09 +0200 Subject: [PATCH 033/104] fix failing test due to time diff issues --- pkg/services/sqlstore/dashboard_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index 0ca1c5d67e49f..8ff78c4a0ffa8 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -181,7 +181,7 @@ func TestDashboardDataAccess(t *testing.T) { So(err, ShouldBeNil) So(query.Result.FolderId, ShouldEqual, 0) So(query.Result.CreatedBy, ShouldEqual, savedDash.CreatedBy) - So(query.Result.Created, ShouldEqual, savedDash.Created.Truncate(time.Second)) + So(query.Result.Created, ShouldHappenWithin, 3*time.Second, savedDash.Created) So(query.Result.UpdatedBy, ShouldEqual, 
100) So(query.Result.Updated.IsZero(), ShouldBeFalse) }) From 582652145fa825cfce0a85b827d70f09b2cda45e Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 19:21:23 +0200 Subject: [PATCH 034/104] minor fixes --- docs/sources/features/datasources/prometheus.md | 6 +++++- docs/sources/reference/templating.md | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 190220fb0f17c..0ed9e108df640 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -76,7 +76,11 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). -It is possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. +#### Using interval and range variables + +> Support for `$__range` and `$__range_ms` only available from Grafana v5.3 + +It's possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. ### Using variables in queries diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 08a142d363691..ce1a1299d26ec 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -274,6 +274,9 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. 
The variable will be replaced with the series name or alias. ### The $__range Variable + +> Only available in Grafana v5.3+ + Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`. ## Repeating Panels From 055d208a326f08cc4ad69324f9c4c1722b35e59e Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Wed, 25 Jul 2018 11:27:43 +0900 Subject: [PATCH 035/104] fix invalid reference --- pkg/tsdb/cloudwatch/cloudwatch.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 4af73fc2ba9f9..92352a5131538 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -99,14 +99,15 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo continue } + RefId := queryContext.Queries[i].RefId query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - result.Results[query.RefId] = &tsdb.QueryResult{ + result.Results[RefId] = &tsdb.QueryResult{ Error: err, } return result, nil } - query.RefId = queryContext.Queries[i].RefId + query.RefId = RefId if query.Id != "" { if _, ok := getMetricDataQueries[query.Region]; !ok { From f4ab432542383c726d517f7a70000460d69ac4b3 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 25 Jul 2018 10:29:55 +0200 Subject: [PATCH 036/104] added position absolute and some flexbox so I could remov changes in display and setTimeout, added tests and types, did some renaming --- public/app/containers/Teams/TeamList.tsx | 2 +- .../DeleteButton/DeleteButton.jest.tsx | 44 ++++++++++ .../components/DeleteButton/DeleteButton.tsx | 82 ++++++++----------- public/sass/components/_delete_button.scss | 37 +++++---- 4 files changed, 99 insertions(+), 66 deletions(-) create mode 100644 public/app/core/components/DeleteButton/DeleteButton.jest.tsx diff --git 
a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx index 87d24f8ddd4c3..b86763d879985 100644 --- a/public/app/containers/Teams/TeamList.tsx +++ b/public/app/containers/Teams/TeamList.tsx @@ -55,7 +55,7 @@ export class TeamList extends React.Component { {team.memberCount} ); diff --git a/public/app/core/components/DeleteButton/DeleteButton.jest.tsx b/public/app/core/components/DeleteButton/DeleteButton.jest.tsx new file mode 100644 index 0000000000000..12acadee18adc --- /dev/null +++ b/public/app/core/components/DeleteButton/DeleteButton.jest.tsx @@ -0,0 +1,44 @@ +import React from 'react'; +import DeleteButton from './DeleteButton'; +import { shallow } from 'enzyme'; + +describe('DeleteButton', () => { + let wrapper; + let deleted; + + beforeAll(() => { + deleted = false; + + function deleteItem() { + deleted = true; + } + wrapper = shallow( deleteItem()} />); + }); + + it('should show confirm delete when clicked', () => { + expect(wrapper.state().showConfirm).toBe(false); + wrapper.find('.delete-button').simulate('click'); + expect(wrapper.state().showConfirm).toBe(true); + }); + + it('should hide confirm delete when clicked', () => { + wrapper.find('.delete-button').simulate('click'); + expect(wrapper.state().showConfirm).toBe(true); + wrapper + .find('.confirm-delete') + .find('.btn') + .at(0) + .simulate('click'); + expect(wrapper.state().showConfirm).toBe(false); + }); + + it('should show confirm delete when clicked', () => { + expect(deleted).toBe(false); + wrapper + .find('.confirm-delete') + .find('.btn') + .at(1) + .simulate('click'); + expect(deleted).toBe(true); + }); +}); diff --git a/public/app/core/components/DeleteButton/DeleteButton.tsx b/public/app/core/components/DeleteButton/DeleteButton.tsx index 61a322b591eb4..a83ce6097ad0e 100644 --- a/public/app/core/components/DeleteButton/DeleteButton.tsx +++ b/public/app/core/components/DeleteButton/DeleteButton.tsx @@ -1,73 +1,61 @@ -import React, { Component } from 
'react'; +import React, { PureComponent } from 'react'; -export default class DeleteButton extends Component { - state = { - deleteButton: 'delete-button--show', - confirmSpan: 'confirm-delete--removed', +export interface DeleteButtonProps { + onConfirmDelete(); +} + +export interface DeleteButtonStates { + showConfirm: boolean; +} + +export default class DeleteButton extends PureComponent { + state: DeleteButtonStates = { + showConfirm: false, }; - handleDelete = event => { + onClickDelete = event => { if (event) { event.preventDefault(); } this.setState({ - deleteButton: 'delete-button--hide', + showConfirm: true, }); - - setTimeout(() => { - this.setState({ - deleteButton: 'delete-button--removed', - }); - }, 100); - - setTimeout(() => { - this.setState({ - confirmSpan: 'confirm-delete--hide', - }); - }, 100); - - setTimeout(() => { - this.setState({ - confirmSpan: 'confirm-delete--show', - }); - }, 150); }; - cancelDelete = event => { - event.preventDefault(); - + onClickCancel = event => { + if (event) { + event.preventDefault(); + } this.setState({ - confirmSpan: 'confirm-delete--hide', + showConfirm: false, }); - - setTimeout(() => { - this.setState({ - confirmSpan: 'confirm-delete--removed', - deleteButton: 'delete-button--hide', - }); - }, 140); - - setTimeout(() => { - this.setState({ - deleteButton: 'delete-button--show', - }); - }, 190); }; render() { - const { confirmDelete } = this.props; + const onClickConfirm = this.props.onConfirmDelete; + let showConfirm; + let showDeleteButton; + + if (this.state.showConfirm) { + showConfirm = 'show'; + showDeleteButton = 'hide'; + } else { + showConfirm = 'hide'; + showDeleteButton = 'show'; + } + return ( - + - - + + Cancel - + Confirm Delete diff --git a/public/sass/components/_delete_button.scss b/public/sass/components/_delete_button.scss index 19f32189d81c7..e56a1181a093c 100644 --- a/public/sass/components/_delete_button.scss +++ b/public/sass/components/_delete_button.scss @@ -1,49 +1,50 @@ +// sets a 
fixed width so that the rest of the table +// isn't affected by the animation .delete-button-container { - max-width: 24px; width: 24px; direction: rtl; - max-height: 38px; - display: block; + display: flex; + align-items: center; } +//this container is used to make sure confirm-delete isn't +//shown outside of table .confirm-delete-container { overflow: hidden; width: 145px; - display: block; + position: absolute; + z-index: 1; } .delete-button { - &--show { - display: inline-block; + position: absolute; + + &.show { opacity: 1; transition: opacity 0.1s ease; + z-index: 2; } - &--hide { - display: inline-block; + &.hide { opacity: 0; transition: opacity 0.1s ease; - } - &--removed { - display: none; + z-index: 0; } } .confirm-delete { - &--show { - display: inline-block; + display: flex; + align-items: flex-start; + + &.show { opacity: 1; transition: opacity 0.08s ease-out, transform 0.1s ease-out; transform: translateX(0); } - &--hide { - display: inline-block; + &.hide { opacity: 0; transition: opacity 0.12s ease-in, transform 0.14s ease-in; transform: translateX(100px); } - &--removed { - display: none; - } } From df62282c115cea465577b5f1c02077b87166255e Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 25 Jul 2018 11:27:43 +0200 Subject: [PATCH 037/104] fix for typeahead background, increased lighten --- public/sass/_variables.light.scss | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index b6e9e7db979bd..b6248da6a002c 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -218,7 +218,7 @@ $search-filter-box-bg: $gray-7; // Typeahead $typeahead-shadow: 0 5px 10px 0 $gray-5; -$typeahead-selected-bg: lighten($blue, 25%); +$typeahead-selected-bg: lighten($blue, 57%); $typeahead-selected-color: $blue; // Dropdowns From 5fbd8ada3c55cfe8eecc57d894b6a445b76e00c9 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 
11:54:51 +0200 Subject: [PATCH 038/104] changelog: add notes about closing #12668 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa794b92164e2..27651b2216f43 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.1 (2018-06-29) From 45762d04e392be18658df8a0ecd081a03bb09b5f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 11:55:34 +0200 Subject: [PATCH 039/104] changelog: update [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 27651b2216f43..0f813272e60f4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,7 +23,7 @@ * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) -# 5.2.2 (unreleased) +# 5.2.2 (2018-07-25) ### Minor From 9c40028d58431fcab8c3d7dddb44b2593a0c7130 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 13:22:55 +0200 Subject: [PATCH 040/104] changelog: add notes about closing #12668 [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f813272e60f4..990421d30d38d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ * **Alerting**: Fix diff and percent_diff reducers 
[#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.2 (2018-07-25) @@ -33,7 +34,6 @@ * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) -* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.1 (2018-06-29) From 7e773e2d5e35045f87be875fa81ac2c930d1257f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 14:14:25 +0200 Subject: [PATCH 041/104] changelog: add notes about closing #12533 [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 990421d30d38d..6409f094f6573 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,7 @@ * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) -* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) +* **Prometheus**: Prevent error using 
$__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533), thx [@mtanda](https://github.com/mtanda) # 5.2.1 (2018-06-29) From f3504612062f2bcf43a02c985942d5b70ca52439 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 14:52:03 +0200 Subject: [PATCH 042/104] Start conversion --- .../specs/variable_srv_init.jest.ts | 238 ++++++++++++++++++ 1 file changed, 238 insertions(+) create mode 100644 public/app/features/templating/specs/variable_srv_init.jest.ts diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts new file mode 100644 index 0000000000000..218170ae45477 --- /dev/null +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -0,0 +1,238 @@ +//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; + +import '../all'; + +import _ from 'lodash'; +// import helpers from 'test/specs/helpers'; +// import { Emitter } from 'app/core/core'; +import { VariableSrv } from '../variable_srv'; +import $q from 'q'; + +describe('VariableSrv init', function() { + let templateSrv = { + init: () => {}, + }; + let $injector = { + instantiate: (vars, model) => { + return new vars(model.model); + }, + }; + let $rootscope = { + $on: () => {}, + }; + + let ctx = { + datasourceSrv: {}, + $location: {}, + dashboard: {}, + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); + // beforeEach( + // angularMocks.inject(($rootScope, $q, $location, $injector) => { + // ctx.$q = $q; + // ctx.$rootScope = $rootScope; + // ctx.$location = $location; + // ctx.variableSrv 
= $injector.get('variableSrv'); + // ctx.$rootScope.$digest(); + // }) + // ); + + function describeInitScenario(desc, fn) { + describe(desc, function() { + // events: new Emitter(), + var scenario: any = { + urlParams: {}, + setup: setupFn => { + scenario.setupFn = setupFn; + }, + }; + + beforeEach(function() { + scenario.setupFn(); + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); + ctx.variableSrv.datasource = {}; + ctx.variableSrv.datasource.metricFindQuery = jest.fn(() => Promise.resolve(scenario.queryResult)); + + ctx.variableSrv.datasourceSrv = { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => Promise.resolve(scenario.metricSources), + }; + + ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); + ctx.variableSrv.dashboard = { + templating: { list: scenario.variables }, + // events: new Emitter(), + }; + + ctx.variableSrv.init(ctx.variableSrv.dashboard); + // ctx.$rootScope.$digest(); + + scenario.variables = ctx.variableSrv.variables; + }); + + fn(scenario); + }); + } + + ['query', 'interval', 'custom', 'datasource'].forEach(type => { + describeInitScenario('when setting ' + type + ' variable via url', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: type, + current: { text: 'test', value: 'test' }, + options: [{ text: 'test', value: 'test' }], + }, + ]; + scenario.urlParams['var-apps'] = 'new'; + scenario.metricSources = []; + }); + + it('should update current value', () => { + expect(scenario.variables[0].current.value).toBe('new'); + expect(scenario.variables[0].current.text).toBe('new'); + }); + }); + }); + + describe('given dependent variables', () => { + var variableList = [ + { + name: 'app', + type: 'query', + query: '', + current: { text: 'app1', value: 'app1' }, + options: [{ text: 'app1', value: 'app1' }], + }, + { + name: 'server', + type: 'query', + refresh: 1, + query: '$app.*', + current: { text: 'server1', value: 
'server1' }, + options: [{ text: 'server1', value: 'server1' }], + }, + ]; + + describeInitScenario('when setting parent var from url', scenario => { + scenario.setup(() => { + scenario.variables = _.cloneDeep(variableList); + scenario.urlParams['var-app'] = 'google'; + scenario.queryResult = [{ text: 'google-server1' }, { text: 'google-server2' }]; + }); + + it('should update child variable', () => { + expect(scenario.variables[1].options.length).toBe(2); + expect(scenario.variables[1].current.text).toBe('google-server1'); + }); + + it('should only update it once', () => { + expect(ctx.variableSrv.datasource.metricFindQuery).toHaveBeenCalledTimes(1); + }); + }); + }); + + describeInitScenario('when datasource variable is initialized', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + type: 'datasource', + query: 'graphite', + name: 'test', + current: { value: 'backend4_pee', text: 'backend4_pee' }, + regex: '/pee$/', + }, + ]; + scenario.metricSources = [ + { name: 'backend1', meta: { id: 'influx' } }, + { name: 'backend2_pee', meta: { id: 'graphite' } }, + { name: 'backend3', meta: { id: 'graphite' } }, + { name: 'backend4_pee', meta: { id: 'graphite' } }, + ]; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options.length).toBe(2); + }); + }); + + describeInitScenario('when template variable is present in url multiple times', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: 'query', + multi: true, + current: { text: 'val1', value: 'val1' }, + options: [ + { text: 'val1', value: 'val1' }, + { text: 'val2', value: 'val2' }, + { text: 'val3', value: 'val3', selected: true }, + ], + }, + ]; + scenario.urlParams['var-apps'] = ['val2', 'val1']; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.current.value.length).toBe(2); + 
expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('val2 + val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); + }); + + it('should set options that are not in value to selected false', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options[2].selected).toBe(false); + }); + }); + + describeInitScenario('when template variable is present in url multiple times using key/values', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: 'query', + multi: true, + current: { text: 'Val1', value: 'val1' }, + options: [ + { text: 'Val1', value: 'val1' }, + { text: 'Val2', value: 'val2' }, + { text: 'Val3', value: 'val3', selected: true }, + ], + }, + ]; + scenario.urlParams['var-apps'] = ['val2', 'val1']; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('Val2 + Val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); + }); + + it('should set options that are not in value to selected false', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options[2].selected).toBe(false); + }); + }); +}); From 7d51c1524007fc47dc225e1256535c1386c07aca Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 16:15:03 +0200 Subject: [PATCH 043/104] Two passing tests --- .../specs/variable_srv_init.jest.ts | 53 ++++++++++++++----- .../app/features/templating/variable_srv.ts | 1 + 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts 
b/public/app/features/templating/specs/variable_srv_init.jest.ts index 218170ae45477..519adc0a35032 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -7,16 +7,18 @@ import _ from 'lodash'; // import { Emitter } from 'app/core/core'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; +// import { model } from 'mobx-state-tree/dist/internal'; describe('VariableSrv init', function() { let templateSrv = { - init: () => {}, - }; - let $injector = { - instantiate: (vars, model) => { - return new vars(model.model); + init: vars => { + this.variables = vars; }, + variableInitialized: () => {}, + updateTemplateData: () => {}, + replace: str => str, }; + let $injector = {}; let $rootscope = { $on: () => {}, }; @@ -57,24 +59,35 @@ describe('VariableSrv init', function() { }, }; - beforeEach(function() { + beforeEach(async () => { scenario.setupFn(); + ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve(scenario.queryResult)), + }, + datasourceSrv: { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => Promise.resolve(scenario.metricSources), + }, + templateSrv, + }; + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); - ctx.variableSrv.datasource = {}; - ctx.variableSrv.datasource.metricFindQuery = jest.fn(() => Promise.resolve(scenario.queryResult)); - ctx.variableSrv.datasourceSrv = { - get: () => Promise.resolve(ctx.datasource), - getMetricSources: () => Promise.resolve(scenario.metricSources), + $injector.instantiate = (variable, model) => { + return getVarMockConstructor(variable, model, ctx); }; + ctx.variableSrv.datasource = ctx.datasource; + ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; + ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); ctx.variableSrv.dashboard = { templating: { list: scenario.variables }, - // events: new Emitter(), + // events: new 
Emitter(), }; - ctx.variableSrv.init(ctx.variableSrv.dashboard); + await ctx.variableSrv.init(ctx.variableSrv.dashboard); // ctx.$rootScope.$digest(); scenario.variables = ctx.variableSrv.variables; @@ -236,3 +249,17 @@ describe('VariableSrv init', function() { }); }); }); + +function getVarMockConstructor(variable, model, ctx) { + console.log(model.model.type); + switch (model.model.type) { + case 'datasource': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'query': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'interval': + return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + default: + return new variable(model.model); + } +} diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 8ad3c2845e206..9f6522c9b86c7 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -23,6 +23,7 @@ export class VariableSrv { // init variables for (let variable of this.variables) { + console.log(variable); variable.initLock = this.$q.defer(); } From 0f99e624b680b60e00ca05f408c5b85464d7cf81 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 16:20:00 +0200 Subject: [PATCH 044/104] docs: using interval and range variables in prometheus Included example usages --- .../features/datasources/prometheus.md | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 0ed9e108df640..3a04ef92e31aa 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -80,7 +80,26 @@ For details of *metric names*, *label names* and *label values* are please refer > Support for `$__range` and `$__range_ms` only available from Grafana v5.3 -It's possible to use some 
global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. +It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since +`label_values` function doesn't support queries. + +Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard. + +**Example usage:** + +Populate a variable with the the busiest 5 request instances based on average QPS over the time range shown in the dashboard: + +``` +Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instance))) +Regex: /"([^"]+)"/ +``` + +Populate a variable with the instances having a certain state over the time range shown in the dashboard: + +``` +Query: query_result(max_over_time([$__range]) != ) +Regex: +``` ### Using variables in queries From 84e431d377b51405f37b4bae8321454218bcc7c4 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 25 Jul 2018 16:16:33 +0200 Subject: [PATCH 045/104] Add tslib to TS compiler - using tslib reduces bundle sizes - add compiler option for easier default imports of CJS modules - remove double entry of fork-ts-checker-plugin - speed up hot reload by using exprimental ts-loader API --- package.json | 16 ++++---- scripts/webpack/webpack.hot.js | 10 ++++- tsconfig.json | 73 +++++++++++++++++++--------------- yarn.lock | 8 +++- 4 files changed, 65 insertions(+), 42 deletions(-) diff --git a/package.json b/package.json index c26438230cc8b..c0581c1de43e3 100644 --- a/package.json +++ b/package.json 
@@ -34,7 +34,7 @@ "expose-loader": "^0.7.3", "extract-text-webpack-plugin": "^4.0.0-beta.0", "file-loader": "^1.1.11", - "fork-ts-checker-webpack-plugin": "^0.4.1", + "fork-ts-checker-webpack-plugin": "^0.4.2", "gaze": "^1.1.2", "glob": "~7.0.0", "grunt": "1.0.1", @@ -71,12 +71,14 @@ "karma-webpack": "^3.0.0", "lint-staged": "^6.0.0", "load-grunt-tasks": "3.5.2", + "mini-css-extract-plugin": "^0.4.0", "mobx-react-devtools": "^4.2.15", "mocha": "^4.0.1", "ng-annotate-loader": "^0.6.1", "ng-annotate-webpack-plugin": "^0.2.1-pre", "ngtemplate-loader": "^2.0.1", "npm": "^5.4.2", + "optimize-css-assets-webpack-plugin": "^4.0.2", "phantomjs-prebuilt": "^2.1.15", "postcss-browser-reporter": "^0.5.0", "postcss-loader": "^2.0.6", @@ -90,15 +92,16 @@ "style-loader": "^0.21.0", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", - "ts-loader": "^4.3.0", "ts-jest": "^22.4.6", + "ts-loader": "^4.3.0", + "tslib": "^1.9.3", "tslint": "^5.8.0", "tslint-loader": "^3.5.3", "typescript": "^2.6.2", + "uglifyjs-webpack-plugin": "^1.2.7", "webpack": "^4.8.0", "webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", - "fork-ts-checker-webpack-plugin": "^0.4.2", "webpack-cli": "^2.1.4", "webpack-dev-server": "^3.1.0", "webpack-merge": "^4.1.0", @@ -155,14 +158,12 @@ "immutable": "^3.8.2", "jquery": "^3.2.1", "lodash": "^4.17.10", - "mini-css-extract-plugin": "^0.4.0", "mobx": "^3.4.1", "mobx-react": "^4.3.5", "mobx-state-tree": "^1.3.1", "moment": "^2.22.2", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", - "optimize-css-assets-webpack-plugin": "^4.0.2", "prismjs": "^1.6.0", "prop-types": "^15.6.0", "react": "^16.2.0", @@ -181,10 +182,9 @@ "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", - "tinycolor2": "^1.4.1", - "uglifyjs-webpack-plugin": "^1.2.7" + "tinycolor2": "^1.4.1" }, "resolutions": { "caniuse-db": "1.0.30000772" } -} +} \ No newline at end of file diff --git 
a/scripts/webpack/webpack.hot.js b/scripts/webpack/webpack.hot.js index 28c8cec504d8a..0305a6f465c70 100644 --- a/scripts/webpack/webpack.hot.js +++ b/scripts/webpack/webpack.hot.js @@ -20,6 +20,7 @@ module.exports = merge(common, { path: path.resolve(__dirname, '../../public/build'), filename: '[name].[hash].js', publicPath: "/public/build/", + pathinfo: false, }, resolve: { @@ -37,6 +38,12 @@ module.exports = merge(common, { } }, + optimization: { + removeAvailableModules: false, + removeEmptyChunks: false, + splitChunks: false, + }, + module: { rules: [ { @@ -56,7 +63,8 @@ module.exports = merge(common, { { loader: 'ts-loader', options: { - transpileOnly: true + transpileOnly: true, + experimentalWatchApi: true }, }], }, diff --git a/tsconfig.json b/tsconfig.json index 3596930a62ff4..3ef1dd1b7695c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,32 +1,43 @@ { - "compilerOptions": { - "moduleResolution": "node", - "outDir": "public/dist", - "target": "es5", - "lib": ["es6", "dom"], - "rootDir": "public/", - "jsx": "react", - "module": "esnext", - "declaration": false, - "allowSyntheticDefaultImports": true, - "inlineSourceMap": false, - "sourceMap": true, - "noEmitOnError": false, - "emitDecoratorMetadata": false, - "experimentalDecorators": true, - "noImplicitReturns": true, - "noImplicitThis": false, - "noImplicitUseStrict":false, - "noImplicitAny": false, - "noUnusedLocals": true, - "baseUrl": "public", - "paths": { - "app": ["app"] - } - }, - "include": [ - "public/app/**/*.ts", - "public/app/**/*.tsx", - "public/test/**/*.ts" - ] -} + "compilerOptions": { + "moduleResolution": "node", + "outDir": "public/dist", + "target": "es5", + "lib": [ + "es6", + "dom" + ], + "rootDir": "public/", + "jsx": "react", + "module": "esnext", + "declaration": false, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "importHelpers": true, // importing helper functions from tslib + "noEmitHelpers": true, // 
disable emitting inline helper functions + "removeComments": false, // comments are needed by angular injections + "inlineSourceMap": false, + "sourceMap": true, + "noEmitOnError": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": true, + "noImplicitReturns": true, + "noImplicitThis": false, + "noImplicitUseStrict": false, + "noImplicitAny": false, + "noUnusedLocals": true, + "baseUrl": "public", + "pretty": true, + "paths": { + "app": [ + "app" + ] + } + }, + "include": [ + "public/app/**/*.ts", + "public/app/**/*.tsx", + "public/test/**/*.ts" + ] +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 6772d7c14a496..6e737e33348bd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3101,7 +3101,7 @@ d3-request@1.0.6: d3-dsv "1" xmlhttprequest "1" -d3-scale-chromatic@^1.1.1: +d3-scale-chromatic@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz#7ee38ffcaa7ad55cfed83a6a668aac5570c653c4" dependencies: @@ -7974,7 +7974,7 @@ mocha@^4.0.1: mkdirp "0.5.1" supports-color "4.4.0" -moment@^2.18.1: +moment@^2.22.2: version "2.22.2" resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.2.tgz#3c257f9839fc0e93ff53149632239eb90783ff66" @@ -12029,6 +12029,10 @@ tslib@^1.8.0, tslib@^1.8.1, tslib@^1.9.0: version "1.9.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.2.tgz#8be0cc9a1f6dc7727c38deb16c2ebd1a2892988e" +tslib@^1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" + tslint-loader@^3.5.3: version "3.6.0" resolved "https://registry.yarnpkg.com/tslint-loader/-/tslint-loader-3.6.0.tgz#12ed4d5ef57d68be25cd12692fb2108b66469d76" From 931b944cddb879dfbfb44c5da18bfda43d36a0e9 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 17:38:45 +0200 Subject: [PATCH 046/104] Almost all tests passing --- .../specs/variable_srv_init.jest.ts | 42 +++++-------------- 
.../app/features/templating/variable_srv.ts | 1 - 2 files changed, 10 insertions(+), 33 deletions(-) diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index 519adc0a35032..eba0ba8cfee86 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -1,13 +1,9 @@ -//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import '../all'; import _ from 'lodash'; -// import helpers from 'test/specs/helpers'; -// import { Emitter } from 'app/core/core'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; -// import { model } from 'mobx-state-tree/dist/internal'; +// import { TemplateSrv } from '../template_srv'; describe('VariableSrv init', function() { let templateSrv = { @@ -16,8 +12,9 @@ describe('VariableSrv init', function() { }, variableInitialized: () => {}, updateTemplateData: () => {}, - replace: str => str, + replace: () => ' /pee$/', }; + // let templateSrv = new TemplateSrv(); let $injector = {}; let $rootscope = { $on: () => {}, @@ -29,29 +26,8 @@ describe('VariableSrv init', function() { dashboard: {}, }; - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - // beforeEach( - // angularMocks.inject(($rootScope, $q, $location, $injector) => { - // ctx.$q = $q; - // ctx.$rootScope = $rootScope; - // ctx.$location = $location; - // ctx.variableSrv = $injector.get('variableSrv'); - // ctx.$rootScope.$digest(); - // }) - // ); - function describeInitScenario(desc, fn) { describe(desc, 
function() { - // events: new Emitter(), var scenario: any = { urlParams: {}, setup: setupFn => { @@ -81,14 +57,12 @@ describe('VariableSrv init', function() { ctx.variableSrv.datasource = ctx.datasource; ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; - ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); + ctx.variableSrv.$location.search = () => scenario.urlParams; ctx.variableSrv.dashboard = { templating: { list: scenario.variables }, - // events: new Emitter(), }; await ctx.variableSrv.init(ctx.variableSrv.dashboard); - // ctx.$rootScope.$digest(); scenario.variables = ctx.variableSrv.variables; }); @@ -113,6 +87,7 @@ describe('VariableSrv init', function() { }); it('should update current value', () => { + console.log(type); expect(scenario.variables[0].current.value).toBe('new'); expect(scenario.variables[0].current.text).toBe('new'); }); @@ -176,6 +151,7 @@ describe('VariableSrv init', function() { }); it('should update current value', function() { + console.log(ctx.variableSrv.variables[0].options); var variable = ctx.variableSrv.variables[0]; expect(variable.options.length).toBe(2); }); @@ -251,14 +227,16 @@ describe('VariableSrv init', function() { }); function getVarMockConstructor(variable, model, ctx) { - console.log(model.model.type); + // console.log(model.model.type); switch (model.model.type) { case 'datasource': - return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); case 'query': return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); case 'interval': return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + case 'custom': + return new variable(model.model, ctx.variableSrv); default: return new variable(model.model); } diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 
9f6522c9b86c7..8ad3c2845e206 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -23,7 +23,6 @@ export class VariableSrv { // init variables for (let variable of this.variables) { - console.log(variable); variable.initLock = this.$q.defer(); } From 35cc85bfcc46efdc79cf22b98741a6ea34b93d58 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 09:36:46 +0200 Subject: [PATCH 047/104] All tests passing. Remove Karma test. --- .../specs/variable_srv_init.jest.ts | 31 ++- .../specs/variable_srv_init_specs.ts | 216 ------------------ 2 files changed, 13 insertions(+), 234 deletions(-) delete mode 100644 public/app/features/templating/specs/variable_srv_init_specs.ts diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index eba0ba8cfee86..ea8689f528b1e 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -3,7 +3,6 @@ import '../all'; import _ from 'lodash'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; -// import { TemplateSrv } from '../template_srv'; describe('VariableSrv init', function() { let templateSrv = { @@ -12,22 +11,21 @@ describe('VariableSrv init', function() { }, variableInitialized: () => {}, updateTemplateData: () => {}, - replace: () => ' /pee$/', + replace: str => + str.replace(this.regex, match => { + return match; + }), }; - // let templateSrv = new TemplateSrv(); + let $injector = {}; let $rootscope = { $on: () => {}, }; - let ctx = { - datasourceSrv: {}, - $location: {}, - dashboard: {}, - }; + let ctx = {}; function describeInitScenario(desc, fn) { - describe(desc, function() { + describe(desc, () => { var scenario: any = { urlParams: {}, setup: setupFn => { @@ -43,7 +41,7 @@ describe('VariableSrv init', function() { }, datasourceSrv: { get: () => 
Promise.resolve(ctx.datasource), - getMetricSources: () => Promise.resolve(scenario.metricSources), + getMetricSources: () => scenario.metricSources, }, templateSrv, }; @@ -87,7 +85,6 @@ describe('VariableSrv init', function() { }); it('should update current value', () => { - console.log(type); expect(scenario.variables[0].current.value).toBe('new'); expect(scenario.variables[0].current.text).toBe('new'); }); @@ -150,8 +147,7 @@ describe('VariableSrv init', function() { ]; }); - it('should update current value', function() { - console.log(ctx.variableSrv.variables[0].options); + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options.length).toBe(2); }); @@ -175,7 +171,7 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.current.value.length).toBe(2); expect(variable.current.value[0]).toBe('val2'); @@ -185,7 +181,7 @@ describe('VariableSrv init', function() { expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options[2].selected).toBe(false); }); @@ -209,7 +205,7 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.current.value.length).toBe(2); expect(variable.current.value[0]).toBe('val2'); @@ -219,7 +215,7 @@ describe('VariableSrv init', function() { expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set 
options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options[2].selected).toBe(false); }); @@ -227,7 +223,6 @@ describe('VariableSrv init', function() { }); function getVarMockConstructor(variable, model, ctx) { - // console.log(model.model.type); switch (model.model.type) { case 'datasource': return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); diff --git a/public/app/features/templating/specs/variable_srv_init_specs.ts b/public/app/features/templating/specs/variable_srv_init_specs.ts deleted file mode 100644 index 11639c6aa8f44..0000000000000 --- a/public/app/features/templating/specs/variable_srv_init_specs.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import '../all'; - -import _ from 'lodash'; -import helpers from 'test/specs/helpers'; -import { Emitter } from 'app/core/core'; - -describe('VariableSrv init', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - beforeEach( - angularMocks.inject(($rootScope, $q, $location, $injector) => { - ctx.$q = $q; - ctx.$rootScope = $rootScope; - ctx.$location = $location; - ctx.variableSrv = $injector.get('variableSrv'); - ctx.$rootScope.$digest(); - }) - ); - - function describeInitScenario(desc, fn) { - describe(desc, function() { - var scenario: any = { - urlParams: {}, - setup: setupFn => { - scenario.setupFn = setupFn; - }, - }; - - beforeEach(function() { - scenario.setupFn(); - ctx.datasource = {}; - ctx.datasource.metricFindQuery = 
sinon.stub().returns(ctx.$q.when(scenario.queryResult)); - - ctx.datasourceSrv.get = sinon.stub().returns(ctx.$q.when(ctx.datasource)); - ctx.datasourceSrv.getMetricSources = sinon.stub().returns(scenario.metricSources); - - ctx.$location.search = sinon.stub().returns(scenario.urlParams); - ctx.dashboard = { - templating: { list: scenario.variables }, - events: new Emitter(), - }; - - ctx.variableSrv.init(ctx.dashboard); - ctx.$rootScope.$digest(); - - scenario.variables = ctx.variableSrv.variables; - }); - - fn(scenario); - }); - } - - ['query', 'interval', 'custom', 'datasource'].forEach(type => { - describeInitScenario('when setting ' + type + ' variable via url', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: type, - current: { text: 'test', value: 'test' }, - options: [{ text: 'test', value: 'test' }], - }, - ]; - scenario.urlParams['var-apps'] = 'new'; - scenario.metricSources = []; - }); - - it('should update current value', () => { - expect(scenario.variables[0].current.value).to.be('new'); - expect(scenario.variables[0].current.text).to.be('new'); - }); - }); - }); - - describe('given dependent variables', () => { - var variableList = [ - { - name: 'app', - type: 'query', - query: '', - current: { text: 'app1', value: 'app1' }, - options: [{ text: 'app1', value: 'app1' }], - }, - { - name: 'server', - type: 'query', - refresh: 1, - query: '$app.*', - current: { text: 'server1', value: 'server1' }, - options: [{ text: 'server1', value: 'server1' }], - }, - ]; - - describeInitScenario('when setting parent var from url', scenario => { - scenario.setup(() => { - scenario.variables = _.cloneDeep(variableList); - scenario.urlParams['var-app'] = 'google'; - scenario.queryResult = [{ text: 'google-server1' }, { text: 'google-server2' }]; - }); - - it('should update child variable', () => { - expect(scenario.variables[1].options.length).to.be(2); - expect(scenario.variables[1].current.text).to.be('google-server1'); - }); 
- - it('should only update it once', () => { - expect(ctx.datasource.metricFindQuery.callCount).to.be(1); - }); - }); - }); - - describeInitScenario('when datasource variable is initialized', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - type: 'datasource', - query: 'graphite', - name: 'test', - current: { value: 'backend4_pee', text: 'backend4_pee' }, - regex: '/pee$/', - }, - ]; - scenario.metricSources = [ - { name: 'backend1', meta: { id: 'influx' } }, - { name: 'backend2_pee', meta: { id: 'graphite' } }, - { name: 'backend3', meta: { id: 'graphite' } }, - { name: 'backend4_pee', meta: { id: 'graphite' } }, - ]; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options.length).to.be(2); - }); - }); - - describeInitScenario('when template variable is present in url multiple times', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: 'query', - multi: true, - current: { text: 'val1', value: 'val1' }, - options: [ - { text: 'val1', value: 'val1' }, - { text: 'val2', value: 'val2' }, - { text: 'val3', value: 'val3', selected: true }, - ], - }, - ]; - scenario.urlParams['var-apps'] = ['val2', 'val1']; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('val2 + val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); - }); - - it('should set options that are not in value to selected false', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); - }); - }); - - describeInitScenario('when template variable is present in url multiple times using key/values', scenario => { - 
scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: 'query', - multi: true, - current: { text: 'Val1', value: 'val1' }, - options: [ - { text: 'Val1', value: 'val1' }, - { text: 'Val2', value: 'val2' }, - { text: 'Val3', value: 'val3', selected: true }, - ], - }, - ]; - scenario.urlParams['var-apps'] = ['val2', 'val1']; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('Val2 + Val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); - }); - - it('should set options that are not in value to selected false', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); - }); - }); -}); From 88e91b3f51fa2c5a66442bfa3322abbfbeebd950 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 10:44:40 +0200 Subject: [PATCH 048/104] Begin conversion --- .../panel/singlestat/specs/singlestat.jest.ts | 384 ++++++++++++++++++ 1 file changed, 384 insertions(+) create mode 100644 public/app/plugins/panel/singlestat/specs/singlestat.jest.ts diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts new file mode 100644 index 0000000000000..2c945aa6eb239 --- /dev/null +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -0,0 +1,384 @@ +// import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common'; + +// import helpers from 'test/specs/helpers'; +import { SingleStatCtrl } from '../module'; +import moment from 'moment'; + +describe('SingleStatCtrl', function() { + let ctx = {}; + let epoch = 1505826363746; + let clock; + + let $scope = { + $on: () => {}, + }; + + let 
$injector = { + get: () => {}, + }; + + SingleStatCtrl.prototype.panel = { + events: { + on: () => {}, + emit: () => {}, + }, + }; + SingleStatCtrl.prototype.dashboard = { + isTimezoneUtc: () => {}, + }; + + function singleStatScenario(desc, func) { + describe(desc, function() { + ctx.setup = function(setupFunc) { + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase()); + // beforeEach(ctx.createPanelController(SingleStatCtrl)); + + beforeEach(function() { + ctx.ctrl = new SingleStatCtrl($scope, $injector, {}); + setupFunc(); + ctx.ctrl.onDataReceived(ctx.data); + ctx.data = ctx.ctrl.data; + }); + }; + + func(ctx); + }); + } + + singleStatScenario('with defaults', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; + }); + + it('Should use series avg as default main value', function() { + expect(ctx.data.value).toBe(15); + expect(ctx.data.valueRounded).toBe(15); + }); + + it('should set formatted falue', function() { + expect(ctx.data.valueFormatted).toBe('15'); + }); + }); + + singleStatScenario('showing serie name instead of value', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; + ctx.ctrl.panel.valueName = 'name'; + }); + + it('Should use series avg as default main value', function() { + expect(ctx.data.value).toBe(0); + expect(ctx.data.valueRounded).toBe(0); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('test.cpu1'); + }); + }); + + singleStatScenario('showing last iso time instead of value', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 
'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsIso'; + }); + + it('Should use time instead of value', function() { + console.log(ctx.data.value); + expect(ctx.data.value).toBe(1505634997920); + expect(ctx.data.valueRounded).toBe(1505634997920); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + }); + }); + + singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsIso'; + // ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + }); + }); + + singleStatScenario('showing last us time instead of value', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsUS'; + }); + + it('Should use time instead of value', function() { + expect(ctx.data.value).toBe(1505634997920); + expect(ctx.data.valueRounded).toBe(1505634997920); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + }); + }); + + singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsUS'; + // ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + }); + }); + + singleStatScenario('showing last time 
from now instead of value', function(ctx) { + beforeEach(() => { + // clock = sinon.useFakeTimers(epoch); + jest.useFakeTimers(); + }); + + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeFromNow'; + }); + + it('Should use time instead of value', function() { + expect(ctx.data.value).toBe(1505634997920); + expect(ctx.data.valueRounded).toBe(1505634997920); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('2 days ago'); + }); + + afterEach(() => { + jest.clearAllTimers(); + }); + }); + + singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { + beforeEach(() => { + // clock = sinon.useFakeTimers(epoch); + jest.useFakeTimers(); + }); + + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeFromNow'; + // ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('2 days ago'); + }); + + afterEach(() => { + jest.clearAllTimers(); + }); + }); + + singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( + ctx + ) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }]; + }); + + it('Should be rounded', function() { + expect(ctx.data.value).toBe(99.999495); + expect(ctx.data.valueRounded).toBe(100); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('100'); + }); + }); + + singleStatScenario('When value to text mapping is specified', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[9.9, 1]] }]; + ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; + }); + + it('value 
should remain', function() { + expect(ctx.data.value).toBe(9.9); + }); + + it('round should be rounded up', function() { + expect(ctx.data.valueRounded).toBe(10); + }); + + it('Should replace value with text', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for first range', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[41, 50]] }]; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text OK', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[65, 75]] }]; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text NOT OK', function() { + expect(ctx.data.valueFormatted).toBe('NOT OK'); + }); + }); + + describe('When table data', function() { + const tableData = [ + { + columns: [{ text: 'Time', type: 'time' }, { text: 'test1' }, { text: 'mean' }, { text: 'test2' }], + rows: [[1492759673649, 'ignore1', 15, 'ignore2']], + type: 'table', + }, + ]; + + singleStatScenario('with default values', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.ctrl.panel.tableColumn = 'mean'; + }); + + it('Should use first rows value as default main value', function() { + expect(ctx.data.value).toBe(15); + expect(ctx.data.valueRounded).toBe(15); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('15'); + }); + }); + + singleStatScenario('When table data has multiple columns', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + 
ctx.ctrl.panel.tableColumn = ''; + }); + + it('Should set column to first column that is not time', function() { + expect(ctx.ctrl.panel.tableColumn).toBe('test1'); + }); + }); + + singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( + ctx + ) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + }); + + it('Should be rounded', function() { + expect(ctx.data.value).toBe(99.99999); + expect(ctx.data.valueRounded).toBe(100); + }); + + it('should set formatted falue', function() { + expect(ctx.data.valueFormatted).toBe('100'); + }); + }); + + singleStatScenario('When value to text mapping is specified', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; + }); + + it('value should remain', function() { + expect(ctx.data.value).toBe(9.9); + }); + + it('round should be rounded up', function() { + expect(ctx.data.valueRounded).toBe(10); + }); + + it('Should replace value with text', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for first range', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 41, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text OK', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + 
ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text NOT OK', function() { + expect(ctx.data.valueFormatted).toBe('NOT OK'); + }); + }); + + singleStatScenario('When value is string', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'test1'; + }); + + it('Should replace value with text NOT OK', function() { + expect(ctx.data.valueFormatted).toBe('ignore1'); + }); + }); + + singleStatScenario('When value is zero', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 0, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + }); + + it('Should return zero', function() { + expect(ctx.data.value).toBe(0); + }); + }); + }); +}); From 7699451d9438546e6655975d53deb7bf6314562d Mon Sep 17 00:00:00 2001 From: David Date: Thu, 26 Jul 2018 14:04:12 +0200 Subject: [PATCH 049/104] Refactor Explore query field (#12643) * Refactor Explore query field - extract typeahead field that only contains logic for the typeahead mechanics - renamed QueryField to PromQueryField, a wrapper around TypeaheadField that deals with Prometheus-specific concepts - PromQueryField creates a promql typeahead by providing the handlers for producing suggestions, and for applying suggestions - The `refresher` promise is needed to trigger a render once an async action in the wrapper returns. This is prep work for a composable query field to be used by Explore, as well as editors in datasource plugins. 
* Added typeahead handling tests - extracted context-to-suggestion logic to make it testable - kept DOM-dependent parts in main onTypeahead funtion * simplified error handling in explore query field * Refactor query suggestions - use monaco's suggestion types (roughly), see https://github.com/Microsoft/monaco-editor/blob/f6fb545/monaco.d.ts#L4208 - suggest functions and metrics in empty field (ctrl+space) - copy and expand prometheus function docs from prometheus datasource (will be migrated back to the datasource in the future) * Added prop and state types, removed unused cwrp * Split up suggestion processing for code readability --- .../Explore/PromQueryField.jest.tsx | 125 ++++ .../app/containers/Explore/PromQueryField.tsx | 340 +++++++++++ public/app/containers/Explore/QueryField.tsx | 545 ++++++++---------- public/app/containers/Explore/QueryRows.tsx | 6 +- public/app/containers/Explore/Typeahead.tsx | 61 +- .../Explore/slate-plugins/prism/promql.ts | 417 ++++++++++++-- public/sass/components/_slate_editor.scss | 1 + 7 files changed, 1096 insertions(+), 399 deletions(-) create mode 100644 public/app/containers/Explore/PromQueryField.jest.tsx create mode 100644 public/app/containers/Explore/PromQueryField.tsx diff --git a/public/app/containers/Explore/PromQueryField.jest.tsx b/public/app/containers/Explore/PromQueryField.jest.tsx new file mode 100644 index 0000000000000..8d2903cb2c229 --- /dev/null +++ b/public/app/containers/Explore/PromQueryField.jest.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import Enzyme, { shallow } from 'enzyme'; +import Adapter from 'enzyme-adapter-react-16'; + +Enzyme.configure({ adapter: new Adapter() }); + +import PromQueryField from './PromQueryField'; + +describe('PromQueryField typeahead handling', () => { + const defaultProps = { + request: () => ({ data: { data: [] } }), + }; + + it('returns default suggestions on emtpty context', () => { + const instance = shallow().instance() as PromQueryField; + const result = 
instance.getTypeahead({ text: '', prefix: '', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + + describe('range suggestions', () => { + it('returns range suggestions in range context', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: '1', prefix: '1', wrapperClasses: ['context-range'] }); + expect(result.context).toBe('context-range'); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions).toEqual([ + { + items: [{ label: '1m' }, { label: '5m' }, { label: '10m' }, { label: '30m' }, { label: '1h' }], + label: 'Range vector', + }, + ]); + }); + }); + + describe('metric suggestions', () => { + it('returns metrics suggestions by default', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ text: 'a', prefix: 'a', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + + it('returns default suggestions after a binary operator', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ text: '*', prefix: '', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + }); + + describe('label suggestions', () => { + it('returns default label suggestions on label context and no metric', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: 'j', prefix: 'j', wrapperClasses: ['context-labels'] }); + expect(result.context).toBe('context-labels'); + expect(result.suggestions).toEqual([{ items: [{ label: 'job' }, { label: 'instance' }], label: 'Labels' }]); + }); + + it('returns label suggestions on 
label context and metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-labels'], + metric: 'foo', + }); + expect(result.context).toBe('context-labels'); + expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]); + }); + + it('returns a refresher on label context and unavailable metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-labels'], + metric: 'xxx', + }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeInstanceOf(Promise); + expect(result.suggestions).toEqual([]); + }); + + it('returns label values on label context when given a metric and a label key', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: '=ba', + prefix: 'ba', + wrapperClasses: ['context-labels'], + metric: 'foo', + labelKey: 'bar', + }); + expect(result.context).toBe('context-label-values'); + expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values' }]); + }); + + it('returns label suggestions on aggregation context and metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-aggregation'], + metric: 'foo', + }); + expect(result.context).toBe('context-aggregation'); + expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]); + }); + }); +}); diff --git a/public/app/containers/Explore/PromQueryField.tsx b/public/app/containers/Explore/PromQueryField.tsx new file mode 100644 index 0000000000000..eb8fc25c67f6c --- /dev/null +++ b/public/app/containers/Explore/PromQueryField.tsx @@ -0,0 +1,340 @@ +import _ from 'lodash'; +import React from 
'react'; + +// dom also includes Element polyfills +import { getNextCharacter, getPreviousCousin } from './utils/dom'; +import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; +import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql'; +import RunnerPlugin from './slate-plugins/runner'; +import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; + +import TypeaheadField, { + Suggestion, + SuggestionGroup, + TypeaheadInput, + TypeaheadFieldState, + TypeaheadOutput, +} from './QueryField'; + +const EMPTY_METRIC = ''; +const METRIC_MARK = 'metric'; +const PRISM_LANGUAGE = 'promql'; + +export const wrapLabel = label => ({ label }); +export const setFunctionMove = (suggestion: Suggestion): Suggestion => { + suggestion.move = -1; + return suggestion; +}; + +export function willApplySuggestion( + suggestion: string, + { typeaheadContext, typeaheadText }: TypeaheadFieldState +): string { + // Modify suggestion based on context + switch (typeaheadContext) { + case 'context-labels': { + const nextChar = getNextCharacter(); + if (!nextChar || nextChar === '}' || nextChar === ',') { + suggestion += '='; + } + break; + } + + case 'context-label-values': { + // Always add quotes and remove existing ones instead + if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { + suggestion = `"${suggestion}`; + } + if (getNextCharacter() !== '"') { + suggestion = `${suggestion}"`; + } + break; + } + + default: + } + return suggestion; +} + +interface PromQueryFieldProps { + initialQuery?: string | null; + labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...] + labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...] 
+ metrics?: string[]; + onPressEnter?: () => void; + onQueryChange?: (value: string) => void; + portalPrefix?: string; + request?: (url: string) => any; +} + +interface PromQueryFieldState { + labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...] + labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...] + metrics: string[]; +} + +interface PromTypeaheadInput { + text: string; + prefix: string; + wrapperClasses: string[]; + metric?: string; + labelKey?: string; +} + +class PromQueryField extends React.Component { + plugins: any[]; + + constructor(props, context) { + super(props, context); + + this.plugins = [ + RunnerPlugin({ handler: props.onPressEnter }), + PluginPrism({ definition: PrismPromql, language: PRISM_LANGUAGE }), + ]; + + this.state = { + labelKeys: props.labelKeys || {}, + labelValues: props.labelValues || {}, + metrics: props.metrics || [], + }; + } + + componentDidMount() { + this.fetchMetricNames(); + } + + onChangeQuery = value => { + // Send text change to parent + const { onQueryChange } = this.props; + if (onQueryChange) { + onQueryChange(value); + } + }; + + onReceiveMetrics = () => { + if (!this.state.metrics) { + return; + } + setPrismTokens(PRISM_LANGUAGE, METRIC_MARK, this.state.metrics); + }; + + onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => { + const { editorNode, prefix, text, wrapperNode } = typeahead; + + // Get DOM-dependent context + const wrapperClasses = Array.from(wrapperNode.classList); + // Take first metric as lucky guess + const metricNode = editorNode.querySelector(`.${METRIC_MARK}`); + const metric = metricNode && metricNode.textContent; + const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); + const labelKey = labelKeyNode && labelKeyNode.textContent; + + const result = this.getTypeahead({ text, prefix, wrapperClasses, metric, labelKey }); + + console.log('handleTypeahead', wrapperClasses, text, prefix, result.context); + + 
return result; + }; + + // Keep this DOM-free for testing + getTypeahead({ prefix, wrapperClasses, metric, text }: PromTypeaheadInput): TypeaheadOutput { + // Determine candidates by CSS context + if (_.includes(wrapperClasses, 'context-range')) { + // Suggestions for metric[|] + return this.getRangeTypeahead(); + } else if (_.includes(wrapperClasses, 'context-labels')) { + // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|} + return this.getLabelTypeahead.apply(this, arguments); + } else if (metric && _.includes(wrapperClasses, 'context-aggregation')) { + return this.getAggregationTypeahead.apply(this, arguments); + } else if ( + // Non-empty but not inside known token unless it's a metric + (prefix && !_.includes(wrapperClasses, 'token')) || + prefix === metric || + (prefix === '' && !text.match(/^[)\s]+$/)) || // Empty context or after ')' + text.match(/[+\-*/^%]/) // After binary operator + ) { + return this.getEmptyTypeahead(); + } + + return { + suggestions: [], + }; + } + + getEmptyTypeahead(): TypeaheadOutput { + const suggestions: SuggestionGroup[] = []; + suggestions.push({ + prefixMatch: true, + label: 'Functions', + items: FUNCTIONS.map(setFunctionMove), + }); + + if (this.state.metrics) { + suggestions.push({ + label: 'Metrics', + items: this.state.metrics.map(wrapLabel), + }); + } + return { suggestions }; + } + + getRangeTypeahead(): TypeaheadOutput { + return { + context: 'context-range', + suggestions: [ + { + label: 'Range vector', + items: [...RATE_RANGES].map(wrapLabel), + }, + ], + }; + } + + getAggregationTypeahead({ metric }: PromTypeaheadInput): TypeaheadOutput { + let refresher: Promise = null; + const suggestions: SuggestionGroup[] = []; + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } else { + refresher = this.fetchMetricLabels(metric); + } + + return { + refresher, + suggestions, + context: 
'context-aggregation', + }; + } + + getLabelTypeahead({ metric, text, wrapperClasses, labelKey }: PromTypeaheadInput): TypeaheadOutput { + let context: string; + let refresher: Promise = null; + const suggestions: SuggestionGroup[] = []; + if (metric) { + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) { + // Label values + if (labelKey) { + const labelValues = this.state.labelValues[metric][labelKey]; + context = 'context-label-values'; + suggestions.push({ + label: 'Label values', + items: labelValues.map(wrapLabel), + }); + } + } else { + // Label keys + context = 'context-labels'; + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } + } else { + refresher = this.fetchMetricLabels(metric); + } + } else { + // Metric-independent label queries + const defaultKeys = ['job', 'instance']; + // Munge all keys that we have seen together + const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { + return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); + }, defaultKeys); + if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) { + // Label values + if (labelKey) { + if (this.state.labelValues[EMPTY_METRIC]) { + const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; + context = 'context-label-values'; + suggestions.push({ + label: 'Label values', + items: labelValues.map(wrapLabel), + }); + } else { + // Can only query label values for now (API to query keys is under development) + refresher = this.fetchLabelValues(labelKey); + } + } + } else { + // Label keys + context = 'context-labels'; + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } + } + return { context, refresher, suggestions }; + } + + request = url => { + if (this.props.request) { + return this.props.request(url); + } + return fetch(url); + }; + + async 
fetchLabelValues(key) { + const url = `/api/v1/label/${key}/values`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const pairs = this.state.labelValues[EMPTY_METRIC]; + const values = { + ...pairs, + [key]: body.data, + }; + const labelValues = { + ...this.state.labelValues, + [EMPTY_METRIC]: values, + }; + this.setState({ labelValues }); + } catch (e) { + console.error(e); + } + } + + async fetchMetricLabels(name) { + const url = `/api/v1/series?match[]=${name}`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const { keys, values } = processLabels(body.data); + const labelKeys = { + ...this.state.labelKeys, + [name]: keys, + }; + const labelValues = { + ...this.state.labelValues, + [name]: values, + }; + this.setState({ labelKeys, labelValues }); + } catch (e) { + console.error(e); + } + } + + async fetchMetricNames() { + const url = '/api/v1/label/__name__/values'; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + this.setState({ metrics: body.data }, this.onReceiveMetrics); + } catch (error) { + console.error(error); + } + } + + render() { + return ( + + ); + } +} + +export default PromQueryField; diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index 41f6d53541c68..60caddcad319c 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -1,106 +1,163 @@ +import _ from 'lodash'; import React from 'react'; import ReactDOM from 'react-dom'; -import { Value } from 'slate'; +import { Block, Change, Document, Text, Value } from 'slate'; import { Editor } from 'slate-react'; import Plain from 'slate-plain-serializer'; -// dom also includes Element polyfills -import { getNextCharacter, getPreviousCousin } from './utils/dom'; import BracesPlugin from './slate-plugins/braces'; import ClearPlugin from 
'./slate-plugins/clear'; import NewlinePlugin from './slate-plugins/newline'; -import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; -import RunnerPlugin from './slate-plugins/runner'; -import debounce from './utils/debounce'; -import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; import Typeahead from './Typeahead'; -const EMPTY_METRIC = ''; -const METRIC_MARK = 'metric'; export const TYPEAHEAD_DEBOUNCE = 300; -function flattenSuggestions(s) { +function flattenSuggestions(s: any[]): any[] { return s ? s.reduce((acc, g) => acc.concat(g.items), []) : []; } -export const getInitialValue = query => - Value.fromJSON({ - document: { - nodes: [ - { - object: 'block', - type: 'paragraph', - nodes: [ - { - object: 'text', - leaves: [ - { - text: query, - }, - ], - }, - ], - }, - ], - }, +export const makeFragment = (text: string): Document => { + const lines = text.split('\n').map(line => + Block.create({ + type: 'paragraph', + nodes: [Text.create(line)], + }) + ); + + const fragment = Document.create({ + nodes: lines, }); + return fragment; +}; + +export const getInitialValue = (value: string): Value => Value.create({ document: makeFragment(value) }); + +export interface Suggestion { + /** + * The label of this completion item. By default + * this is also the text that is inserted when selecting + * this completion. + */ + label: string; + /** + * The kind of this completion item. Based on the kind + * an icon is chosen by the editor. + */ + kind?: string; + /** + * A human-readable string with additional information + * about this item, like type or symbol information. + */ + detail?: string; + /** + * A human-readable string, can be Markdown, that represents a doc-comment. + */ + documentation?: string; + /** + * A string that should be used when comparing this item + * with other items. When `falsy` the `label` is used. 
+ */ + sortText?: string; + /** + * A string that should be used when filtering a set of + * completion items. When `falsy` the `label` is used. + */ + filterText?: string; + /** + * A string or snippet that should be inserted in a document when selecting + * this completion. When `falsy` the `label` is used. + */ + insertText?: string; + /** + * Delete number of characters before the caret position, + * by default the letters from the beginning of the word. + */ + deleteBackwards?: number; + /** + * Number of steps to move after the insertion, can be negative. + */ + move?: number; +} -class Portal extends React.Component { - node: any; +export interface SuggestionGroup { + /** + * Label that will be displayed for all entries of this group. + */ + label: string; + /** + * List of suggestions of this group. + */ + items: Suggestion[]; + /** + * If true, match only by prefix (and not mid-word). + */ + prefixMatch?: boolean; + /** + * If true, do not filter items in this group based on the search. 
+ */ + skipFilter?: boolean; +} - constructor(props) { - super(props); - const { index = 0, prefix = 'query' } = props; - this.node = document.createElement('div'); - this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); - document.body.appendChild(this.node); - } +interface TypeaheadFieldProps { + additionalPlugins?: any[]; + cleanText?: (text: string) => string; + initialValue: string | null; + onBlur?: () => void; + onFocus?: () => void; + onTypeahead?: (typeahead: TypeaheadInput) => TypeaheadOutput; + onValueChanged?: (value: Value) => void; + onWillApplySuggestion?: (suggestion: string, state: TypeaheadFieldState) => string; + placeholder?: string; + portalPrefix?: string; +} - componentWillUnmount() { - document.body.removeChild(this.node); - } +export interface TypeaheadFieldState { + suggestions: SuggestionGroup[]; + typeaheadContext: string | null; + typeaheadIndex: number; + typeaheadPrefix: string; + typeaheadText: string; + value: Value; +} - render() { - return ReactDOM.createPortal(this.props.children, this.node); - } +export interface TypeaheadInput { + editorNode: Element; + prefix: string; + selection?: Selection; + text: string; + wrapperNode: Element; +} + +export interface TypeaheadOutput { + context?: string; + refresher?: Promise<{}>; + suggestions: SuggestionGroup[]; } -class QueryField extends React.Component { - menuEl: any; - plugins: any; +class QueryField extends React.Component { + menuEl: HTMLElement | null; + plugins: any[]; resetTimer: any; constructor(props, context) { super(props, context); - const { prismDefinition = {}, prismLanguage = 'promql' } = props; - - this.plugins = [ - BracesPlugin(), - ClearPlugin(), - RunnerPlugin({ handler: props.onPressEnter }), - NewlinePlugin(), - PluginPrism({ definition: prismDefinition, language: prismLanguage }), - ]; + // Base plugins + this.plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins]; this.state = { - labelKeys: {}, - 
labelValues: {}, - metrics: props.metrics || [], suggestions: [], + typeaheadContext: null, typeaheadIndex: 0, typeaheadPrefix: '', - value: getInitialValue(props.initialQuery || ''), + typeaheadText: '', + value: getInitialValue(props.initialValue || ''), }; } componentDidMount() { this.updateMenu(); - - if (this.props.metrics === undefined) { - this.fetchMetricNames(); - } } componentWillUnmount() { @@ -112,12 +169,9 @@ class QueryField extends React.Component { } componentWillReceiveProps(nextProps) { - if (nextProps.metrics && nextProps.metrics !== this.props.metrics) { - this.setState({ metrics: nextProps.metrics }, this.onMetricsReceived); - } - // initialQuery is null in case the user typed - if (nextProps.initialQuery !== null && nextProps.initialQuery !== this.props.initialQuery) { - this.setState({ value: getInitialValue(nextProps.initialQuery) }); + // initialValue is null in case the user typed + if (nextProps.initialValue !== null && nextProps.initialValue !== this.props.initialValue) { + this.setState({ value: getInitialValue(nextProps.initialValue) }); } } @@ -125,48 +179,28 @@ class QueryField extends React.Component { const changed = value.document !== this.state.value.document; this.setState({ value }, () => { if (changed) { - this.handleChangeQuery(); + this.handleChangeValue(); } }); - window.requestAnimationFrame(this.handleTypeahead); - }; - - onMetricsReceived = () => { - if (!this.state.metrics) { - return; - } - setPrismTokens(this.props.prismLanguage, METRIC_MARK, this.state.metrics); - - // Trigger re-render - window.requestAnimationFrame(() => { - // Bogus edit to trigger highlighting - const change = this.state.value - .change() - .insertText(' ') - .deleteBackward(1); - this.onChange(change); - }); - }; - - request = url => { - if (this.props.request) { - return this.props.request(url); + if (changed) { + window.requestAnimationFrame(this.handleTypeahead); } - return fetch(url); }; - handleChangeQuery = () => { + handleChangeValue = () 
=> { // Send text change to parent - const { onQueryChange } = this.props; - if (onQueryChange) { - onQueryChange(Plain.serialize(this.state.value)); + const { onValueChanged } = this.props; + if (onValueChanged) { + onValueChanged(Plain.serialize(this.state.value)); } }; - handleTypeahead = debounce(() => { + handleTypeahead = _.debounce(async () => { const selection = window.getSelection(); - if (selection.anchorNode) { + const { cleanText, onTypeahead } = this.props; + + if (onTypeahead && selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; const editorNode = wrapperNode.closest('.slate-query-field'); if (!editorNode || this.state.value.isBlurred) { @@ -175,164 +209,96 @@ class QueryField extends React.Component { } const range = selection.getRangeAt(0); - const text = selection.anchorNode.textContent; const offset = range.startOffset; - const prefix = cleanText(text.substr(0, offset)); - - // Determine candidates by context - const suggestionGroups = []; - const wrapperClasses = wrapperNode.classList; - let typeaheadContext = null; - - // Take first metric as lucky guess - const metricNode = editorNode.querySelector(`.${METRIC_MARK}`); - - if (wrapperClasses.contains('context-range')) { - // Rate ranges - typeaheadContext = 'context-range'; - suggestionGroups.push({ - label: 'Range vector', - items: [...RATE_RANGES], - }); - } else if (wrapperClasses.contains('context-labels') && metricNode) { - const metric = metricNode.textContent; - const labelKeys = this.state.labelKeys[metric]; - if (labelKeys) { - if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { - // Label values - const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); - if (labelKeyNode) { - const labelKey = labelKeyNode.textContent; - const labelValues = this.state.labelValues[metric][labelKey]; - typeaheadContext = 'context-label-values'; - suggestionGroups.push({ - label: 'Label values', - items: labelValues, - }); - } - } else { - 
// Label keys - typeaheadContext = 'context-labels'; - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } - } else { - this.fetchMetricLabels(metric); - } - } else if (wrapperClasses.contains('context-labels') && !metricNode) { - // Empty name queries - const defaultKeys = ['job', 'instance']; - // Munge all keys that we have seen together - const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { - return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); - }, defaultKeys); - if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { - // Label values - const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); - if (labelKeyNode) { - const labelKey = labelKeyNode.textContent; - if (this.state.labelValues[EMPTY_METRIC]) { - const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; - typeaheadContext = 'context-label-values'; - suggestionGroups.push({ - label: 'Label values', - items: labelValues, - }); - } else { - // Can only query label values for now (API to query keys is under development) - this.fetchLabelValues(labelKey); - } - } - } else { - // Label keys - typeaheadContext = 'context-labels'; - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } - } else if (metricNode && wrapperClasses.contains('context-aggregation')) { - typeaheadContext = 'context-aggregation'; - const metric = metricNode.textContent; - const labelKeys = this.state.labelKeys[metric]; - if (labelKeys) { - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } else { - this.fetchMetricLabels(metric); - } - } else if ( - (this.state.metrics && ((prefix && !wrapperClasses.contains('token')) || text.match(/[+\-*/^%]/))) || - wrapperClasses.contains('context-function') - ) { - // Need prefix for metrics - typeaheadContext = 'context-metrics'; - suggestionGroups.push({ - label: 'Metrics', - items: this.state.metrics, - }); + const text = 
selection.anchorNode.textContent; + let prefix = text.substr(0, offset); + if (cleanText) { + prefix = cleanText(prefix); } - let results = 0; - const filteredSuggestions = suggestionGroups.map(group => { - if (group.items) { - group.items = group.items.filter(c => c.length !== prefix.length && c.indexOf(prefix) > -1); - results += group.items.length; - } - return group; + const { suggestions, context, refresher } = onTypeahead({ + editorNode, + prefix, + selection, + text, + wrapperNode, }); - console.log('handleTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); - - this.setState({ - typeaheadPrefix: prefix, - typeaheadContext, - typeaheadText: text, - suggestions: results > 0 ? filteredSuggestions : [], - }); - } - }, TYPEAHEAD_DEBOUNCE); + const filteredSuggestions = suggestions + .map(group => { + if (group.items) { + if (prefix) { + // Filter groups based on prefix + if (!group.skipFilter) { + group.items = group.items.filter(c => (c.filterText || c.label).length >= prefix.length); + if (group.prefixMatch) { + group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) === 0); + } else { + group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) > -1); + } + } + // Filter out the already typed value (prefix) unless it inserts custom text + group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix); + } - applyTypeahead(change, suggestion) { - const { typeaheadPrefix, typeaheadContext, typeaheadText } = this.state; + group.items = _.sortBy(group.items, item => item.sortText || item.label); + } + return group; + }) + .filter(group => group.items && group.items.length > 0); // Filter out empty groups - // Modify suggestion based on context - switch (typeaheadContext) { - case 'context-labels': { - const nextChar = getNextCharacter(); - if (!nextChar || nextChar === '}' || nextChar === ',') { - suggestion += '='; + this.setState( + { + suggestions: 
filteredSuggestions, + typeaheadPrefix: prefix, + typeaheadContext: context, + typeaheadText: text, + }, + () => { + if (refresher) { + refresher.then(this.handleTypeahead).catch(e => console.error(e)); + } } - break; - } + ); + } + }, TYPEAHEAD_DEBOUNCE); - case 'context-label-values': { - // Always add quotes and remove existing ones instead - if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { - suggestion = `"${suggestion}`; - } - if (getNextCharacter() !== '"') { - suggestion = `${suggestion}"`; - } - break; - } + applyTypeahead(change: Change, suggestion: Suggestion): Change { + const { cleanText, onWillApplySuggestion } = this.props; + const { typeaheadPrefix, typeaheadText } = this.state; + let suggestionText = suggestion.insertText || suggestion.label; + const move = suggestion.move || 0; - default: + if (onWillApplySuggestion) { + suggestionText = onWillApplySuggestion(suggestionText, { ...this.state }); } this.resetTypeahead(); // Remove the current, incomplete text and replace it with the selected suggestion - let backward = typeaheadPrefix.length; - const text = cleanText(typeaheadText); + const backward = suggestion.deleteBackwards || typeaheadPrefix.length; + const text = cleanText ? cleanText(typeaheadText) : typeaheadText; const suffixLength = text.length - typeaheadPrefix.length; const offset = typeaheadText.indexOf(typeaheadPrefix); - const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestion === typeaheadText); + const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestionText === typeaheadText); const forward = midWord ? 
suffixLength + offset : 0; - return ( - change - // TODO this line breaks if cursor was moved left and length is longer than whole prefix + // If new-lines, apply suggestion as block + if (suggestionText.match(/\n/)) { + const fragment = makeFragment(suggestionText); + return change .deleteBackward(backward) .deleteForward(forward) - .insertText(suggestion) - .focus() - ); + .insertFragment(fragment) + .focus(); + } + + return change + .deleteBackward(backward) + .deleteForward(forward) + .insertText(suggestionText) + .move(move) + .focus(); } onKeyDown = (event, change) => { @@ -413,74 +379,6 @@ class QueryField extends React.Component { }); }; - async fetchLabelValues(key) { - const url = `/api/v1/label/${key}/values`; - try { - const res = await this.request(url); - console.log(res); - const body = await (res.data || res.json()); - const pairs = this.state.labelValues[EMPTY_METRIC]; - const values = { - ...pairs, - [key]: body.data, - }; - // const labelKeys = { - // ...this.state.labelKeys, - // [EMPTY_METRIC]: keys, - // }; - const labelValues = { - ...this.state.labelValues, - [EMPTY_METRIC]: values, - }; - this.setState({ labelValues }, this.handleTypeahead); - } catch (e) { - if (this.props.onRequestError) { - this.props.onRequestError(e); - } else { - console.error(e); - } - } - } - - async fetchMetricLabels(name) { - const url = `/api/v1/series?match[]=${name}`; - try { - const res = await this.request(url); - const body = await (res.data || res.json()); - const { keys, values } = processLabels(body.data); - const labelKeys = { - ...this.state.labelKeys, - [name]: keys, - }; - const labelValues = { - ...this.state.labelValues, - [name]: values, - }; - this.setState({ labelKeys, labelValues }, this.handleTypeahead); - } catch (e) { - if (this.props.onRequestError) { - this.props.onRequestError(e); - } else { - console.error(e); - } - } - } - - async fetchMetricNames() { - const url = '/api/v1/label/__name__/values'; - try { - const res = await 
this.request(url); - const body = await (res.data || res.json()); - this.setState({ metrics: body.data }, this.onMetricsReceived); - } catch (error) { - if (this.props.onRequestError) { - this.props.onRequestError(error); - } else { - console.error(error); - } - } - } - handleBlur = () => { const { onBlur } = this.props; // If we dont wait here, menu clicks wont work because the menu @@ -498,7 +396,7 @@ class QueryField extends React.Component { } }; - handleClickMenu = item => { + onClickMenu = (item: Suggestion) => { // Manually triggering change const change = this.applyTypeahead(this.state.value.change(), item); this.onChange(change); @@ -531,7 +429,7 @@ class QueryField extends React.Component { // Write DOM requestAnimationFrame(() => { - menu.style.opacity = 1; + menu.style.opacity = '1'; menu.style.top = `${rect.top + scrollY + rect.height + 4}px`; menu.style.left = `${rect.left + scrollX - 2}px`; }); @@ -554,17 +452,16 @@ class QueryField extends React.Component { let selectedIndex = Math.max(this.state.typeaheadIndex, 0); const flattenedSuggestions = flattenSuggestions(suggestions); selectedIndex = selectedIndex % flattenedSuggestions.length || 0; - const selectedKeys = (flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []).map( - i => (typeof i === 'object' ? i.text : i) - ); + const selectedItem: Suggestion | null = + flattenedSuggestions.length > 0 ? 
flattenedSuggestions[selectedIndex] : null; // Create typeahead in DOM root so we can later position it absolutely return ( @@ -591,4 +488,24 @@ class QueryField extends React.Component { } } +class Portal extends React.Component<{ index?: number; prefix: string }, {}> { + node: HTMLElement; + + constructor(props) { + super(props); + const { index = 0, prefix = 'query' } = props; + this.node = document.createElement('div'); + this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); + document.body.appendChild(this.node); + } + + componentWillUnmount() { + document.body.removeChild(this.node); + } + + render() { + return ReactDOM.createPortal(this.props.children, this.node); + } +} + export default QueryField; diff --git a/public/app/containers/Explore/QueryRows.tsx b/public/app/containers/Explore/QueryRows.tsx index a968e1e2c6411..3aaa006d6df15 100644 --- a/public/app/containers/Explore/QueryRows.tsx +++ b/public/app/containers/Explore/QueryRows.tsx @@ -1,7 +1,6 @@ import React, { PureComponent } from 'react'; -import promql from './slate-plugins/prism/promql'; -import QueryField from './QueryField'; +import QueryField from './PromQueryField'; class QueryRow extends PureComponent { constructor(props) { @@ -62,9 +61,6 @@ class QueryRow extends PureComponent { portalPrefix="explore" onPressEnter={this.handlePressEnter} onQueryChange={this.handleChangeQuery} - placeholder="Enter a PromQL query" - prismLanguage="promql" - prismDefinition={promql} request={request} /> diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx index 44fce7f8c7eb9..9924488035c9f 100644 --- a/public/app/containers/Explore/Typeahead.tsx +++ b/public/app/containers/Explore/Typeahead.tsx @@ -1,17 +1,26 @@ import React from 'react'; -function scrollIntoView(el) { +import { Suggestion, SuggestionGroup } from './QueryField'; + +function scrollIntoView(el: HTMLElement) { if (!el || !el.offsetParent) { return; } - const container 
= el.offsetParent; + const container = el.offsetParent as HTMLElement; if (el.offsetTop > container.scrollTop + container.offsetHeight || el.offsetTop < container.scrollTop) { container.scrollTop = el.offsetTop - container.offsetTop; } } -class TypeaheadItem extends React.PureComponent { - el: any; +interface TypeaheadItemProps { + isSelected: boolean; + item: Suggestion; + onClickItem: (Suggestion) => void; +} + +class TypeaheadItem extends React.PureComponent { + el: HTMLElement; + componentDidUpdate(prevProps) { if (this.props.isSelected && !prevProps.isSelected) { scrollIntoView(this.el); @@ -22,20 +31,30 @@ class TypeaheadItem extends React.PureComponent { this.el = el; }; + onClick = () => { + this.props.onClickItem(this.props.item); + }; + render() { - const { hint, isSelected, label, onClickItem } = this.props; + const { isSelected, item } = this.props; const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item'; - const onClick = () => onClickItem(label); return ( -
  • - {label} - {hint && isSelected ?
    {hint}
    : null} +
  • + {item.detail || item.label} + {item.documentation && isSelected ?
    {item.documentation}
    : null}
  • ); } } -class TypeaheadGroup extends React.PureComponent { +interface TypeaheadGroupProps { + items: Suggestion[]; + label: string; + onClickItem: (Suggestion) => void; + selected: Suggestion; +} + +class TypeaheadGroup extends React.PureComponent { render() { const { items, label, selected, onClickItem } = this.props; return ( @@ -43,16 +62,8 @@ class TypeaheadGroup extends React.PureComponent {
    {label}
      {items.map(item => { - const text = typeof item === 'object' ? item.text : item; - const label = typeof item === 'object' ? item.display || item.text : item; return ( - -1} - hint={item.hint} - label={label} - /> + ); })}
    @@ -61,13 +72,19 @@ class TypeaheadGroup extends React.PureComponent { } } -class Typeahead extends React.PureComponent { +interface TypeaheadProps { + groupedItems: SuggestionGroup[]; + menuRef: any; + selectedItem: Suggestion | null; + onClickItem: (Suggestion) => void; +} +class Typeahead extends React.PureComponent { render() { - const { groupedItems, menuRef, selectedItems, onClickItem } = this.props; + const { groupedItems, menuRef, selectedItem, onClickItem } = this.props; return (
      {groupedItems.map(g => ( - + ))}
    ); diff --git a/public/app/containers/Explore/slate-plugins/prism/promql.ts b/public/app/containers/Explore/slate-plugins/prism/promql.ts index 0f0be18cb6fe1..a17c5fbc4f6c9 100644 --- a/public/app/containers/Explore/slate-plugins/prism/promql.ts +++ b/public/app/containers/Explore/slate-plugins/prism/promql.ts @@ -1,67 +1,368 @@ +/* tslint:disable max-line-length */ + export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without']; const AGGREGATION_OPERATORS = [ - 'sum', - 'min', - 'max', - 'avg', - 'stddev', - 'stdvar', - 'count', - 'count_values', - 'bottomk', - 'topk', - 'quantile', + { + label: 'sum', + insertText: 'sum()', + documentation: 'Calculate sum over dimensions', + }, + { + label: 'min', + insertText: 'min()', + documentation: 'Select minimum over dimensions', + }, + { + label: 'max', + insertText: 'max()', + documentation: 'Select maximum over dimensions', + }, + { + label: 'avg', + insertText: 'avg()', + documentation: 'Calculate the average over dimensions', + }, + { + label: 'stddev', + insertText: 'stddev()', + documentation: 'Calculate population standard deviation over dimensions', + }, + { + label: 'stdvar', + insertText: 'stdvar()', + documentation: 'Calculate population standard variance over dimensions', + }, + { + label: 'count', + insertText: 'count()', + documentation: 'Count number of elements in the vector', + }, + { + label: 'count_values', + insertText: 'count_values()', + documentation: 'Count number of elements with the same value', + }, + { + label: 'bottomk', + insertText: 'bottomk()', + documentation: 'Smallest k elements by sample value', + }, + { + label: 'topk', + insertText: 'topk()', + documentation: 'Largest k elements by sample value', + }, + { + label: 'quantile', + insertText: 'quantile()', + documentation: 'Calculate φ-quantile (0 ≤ φ ≤ 1) over dimensions', + }, ]; export const FUNCTIONS = [ ...AGGREGATION_OPERATORS, - 'abs', - 'absent', - 'ceil', - 'changes', - 'clamp_max', - 
'clamp_min', - 'count_scalar', - 'day_of_month', - 'day_of_week', - 'days_in_month', - 'delta', - 'deriv', - 'drop_common_labels', - 'exp', - 'floor', - 'histogram_quantile', - 'holt_winters', - 'hour', - 'idelta', - 'increase', - 'irate', - 'label_replace', - 'ln', - 'log2', - 'log10', - 'minute', - 'month', - 'predict_linear', - 'rate', - 'resets', - 'round', - 'scalar', - 'sort', - 'sort_desc', - 'sqrt', - 'time', - 'vector', - 'year', - 'avg_over_time', - 'min_over_time', - 'max_over_time', - 'sum_over_time', - 'count_over_time', - 'quantile_over_time', - 'stddev_over_time', - 'stdvar_over_time', + { + insertText: 'abs()', + label: 'abs', + detail: 'abs(v instant-vector)', + documentation: 'Returns the input vector with all sample values converted to their absolute value.', + }, + { + insertText: 'absent()', + label: 'absent', + detail: 'absent(v instant-vector)', + documentation: + 'Returns an empty vector if the vector passed to it has any elements and a 1-element vector with the value 1 if the vector passed to it has no elements. 
This is useful for alerting on when no time series exist for a given metric name and label combination.', + }, + { + insertText: 'ceil()', + label: 'ceil', + detail: 'ceil(v instant-vector)', + documentation: 'Rounds the sample values of all elements in `v` up to the nearest integer.', + }, + { + insertText: 'changes()', + label: 'changes', + detail: 'changes(v range-vector)', + documentation: + 'For each input time series, `changes(v range-vector)` returns the number of times its value has changed within the provided time range as an instant vector.', + }, + { + insertText: 'clamp_max()', + label: 'clamp_max', + detail: 'clamp_max(v instant-vector, max scalar)', + documentation: 'Clamps the sample values of all elements in `v` to have an upper limit of `max`.', + }, + { + insertText: 'clamp_min()', + label: 'clamp_min', + detail: 'clamp_min(v instant-vector, min scalar)', + documentation: 'Clamps the sample values of all elements in `v` to have a lower limit of `min`.', + }, + { + insertText: 'count_scalar()', + label: 'count_scalar', + detail: 'count_scalar(v instant-vector)', + documentation: + 'Returns the number of elements in a time series vector as a scalar. This is in contrast to the `count()` aggregation operator, which always returns a vector (an empty one if the input vector is empty) and allows grouping by labels via a `by` clause.', + }, + { + insertText: 'day_of_month()', + label: 'day_of_month', + detail: 'day_of_month(v=vector(time()) instant-vector)', + documentation: 'Returns the day of the month for each of the given times in UTC. Returned values are from 1 to 31.', + }, + { + insertText: 'day_of_week()', + label: 'day_of_week', + detail: 'day_of_week(v=vector(time()) instant-vector)', + documentation: + 'Returns the day of the week for each of the given times in UTC. 
Returned values are from 0 to 6, where 0 means Sunday etc.', + }, + { + insertText: 'days_in_month()', + label: 'days_in_month', + detail: 'days_in_month(v=vector(time()) instant-vector)', + documentation: + 'Returns number of days in the month for each of the given times in UTC. Returned values are from 28 to 31.', + }, + { + insertText: 'delta()', + label: 'delta', + detail: 'delta(v range-vector)', + documentation: + 'Calculates the difference between the first and last value of each time series element in a range vector `v`, returning an instant vector with the given deltas and equivalent labels. The delta is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if the sample values are all integers.', + }, + { + insertText: 'deriv()', + label: 'deriv', + detail: 'deriv(v range-vector)', + documentation: + 'Calculates the per-second derivative of the time series in a range vector `v`, using simple linear regression.', + }, + { + insertText: 'drop_common_labels()', + label: 'drop_common_labels', + detail: 'drop_common_labels(instant-vector)', + documentation: 'Drops all labels that have the same name and value across all series in the input vector.', + }, + { + insertText: 'exp()', + label: 'exp', + detail: 'exp(v instant-vector)', + documentation: + 'Calculates the exponential function for all elements in `v`.\nSpecial cases are:\n* `Exp(+Inf) = +Inf` \n* `Exp(NaN) = NaN`', + }, + { + insertText: 'floor()', + label: 'floor', + detail: 'floor(v instant-vector)', + documentation: 'Rounds the sample values of all elements in `v` down to the nearest integer.', + }, + { + insertText: 'histogram_quantile()', + label: 'histogram_quantile', + detail: 'histogram_quantile(φ float, b instant-vector)', + documentation: + 'Calculates the φ-quantile (0 ≤ φ ≤ 1) from the buckets `b` of a histogram. The samples in `b` are the counts of observations in each bucket. 
Each sample must have a label `le` where the label value denotes the inclusive upper bound of the bucket. (Samples without such a label are silently ignored.) The histogram metric type automatically provides time series with the `_bucket` suffix and the appropriate labels.', + }, + { + insertText: 'holt_winters()', + label: 'holt_winters', + detail: 'holt_winters(v range-vector, sf scalar, tf scalar)', + documentation: + 'Produces a smoothed value for time series based on the range in `v`. The lower the smoothing factor `sf`, the more importance is given to old data. The higher the trend factor `tf`, the more trends in the data is considered. Both `sf` and `tf` must be between 0 and 1.', + }, + { + insertText: 'hour()', + label: 'hour', + detail: 'hour(v=vector(time()) instant-vector)', + documentation: 'Returns the hour of the day for each of the given times in UTC. Returned values are from 0 to 23.', + }, + { + insertText: 'idelta()', + label: 'idelta', + detail: 'idelta(v range-vector)', + documentation: + 'Calculates the difference between the last two samples in the range vector `v`, returning an instant vector with the given deltas and equivalent labels.', + }, + { + insertText: 'increase()', + label: 'increase', + detail: 'increase(v range-vector)', + documentation: + 'Calculates the increase in the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. The increase is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if a counter increases only by integer increments.', + }, + { + insertText: 'irate()', + label: 'irate', + detail: 'irate(v range-vector)', + documentation: + 'Calculates the per-second instant rate of increase of the time series in the range vector. This is based on the last two data points. 
Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for.', + }, + { + insertText: 'label_replace()', + label: 'label_replace', + detail: 'label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)', + documentation: + "For each timeseries in `v`, `label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)` matches the regular expression `regex` against the label `src_label`. If it matches, then the timeseries is returned with the label `dst_label` replaced by the expansion of `replacement`. `$1` is replaced with the first matching subgroup, `$2` with the second etc. If the regular expression doesn't match then the timeseries is returned unchanged.", + }, + { + insertText: 'ln()', + label: 'ln', + detail: 'ln(v instant-vector)', + documentation: + 'calculates the natural logarithm for all elements in `v`.\nSpecial cases are:\n * `ln(+Inf) = +Inf`\n * `ln(0) = -Inf`\n * `ln(x < 0) = NaN`\n * `ln(NaN) = NaN`', + }, + { + insertText: 'log2()', + label: 'log2', + detail: 'log2(v instant-vector)', + documentation: + 'Calculates the binary logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.', + }, + { + insertText: 'log10()', + label: 'log10', + detail: 'log10(v instant-vector)', + documentation: + 'Calculates the decimal logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.', + }, + { + insertText: 'minute()', + label: 'minute', + detail: 'minute(v=vector(time()) instant-vector)', + documentation: + 'Returns the minute of the hour for each of the given times in UTC. Returned values are from 0 to 59.', + }, + { + insertText: 'month()', + label: 'month', + detail: 'month(v=vector(time()) instant-vector)', + documentation: + 'Returns the month of the year for each of the given times in UTC. 
Returned values are from 1 to 12, where 1 means January etc.', + }, + { + insertText: 'predict_linear()', + label: 'predict_linear', + detail: 'predict_linear(v range-vector, t scalar)', + documentation: + 'Predicts the value of time series `t` seconds from now, based on the range vector `v`, using simple linear regression.', + }, + { + insertText: 'rate()', + label: 'rate', + detail: 'rate(v range-vector)', + documentation: + "Calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. Also, the calculation extrapolates to the ends of the time range, allowing for missed scrapes or imperfect alignment of scrape cycles with the range's time period.", + }, + { + insertText: 'resets()', + label: 'resets', + detail: 'resets(v range-vector)', + documentation: + 'For each input time series, `resets(v range-vector)` returns the number of counter resets within the provided time range as an instant vector. Any decrease in the value between two consecutive samples is interpreted as a counter reset.', + }, + { + insertText: 'round()', + label: 'round', + detail: 'round(v instant-vector, to_nearest=1 scalar)', + documentation: + 'Rounds the sample values of all elements in `v` to the nearest integer. Ties are resolved by rounding up. The optional `to_nearest` argument allows specifying the nearest multiple to which the sample values should be rounded. This multiple may also be a fraction.', + }, + { + insertText: 'scalar()', + label: 'scalar', + detail: 'scalar(v instant-vector)', + documentation: + 'Given a single-element input vector, `scalar(v instant-vector)` returns the sample value of that single element as a scalar. 
If the input vector does not have exactly one element, `scalar` will return `NaN`.', + }, + { + insertText: 'sort()', + label: 'sort', + detail: 'sort(v instant-vector)', + documentation: 'Returns vector elements sorted by their sample values, in ascending order.', + }, + { + insertText: 'sort_desc()', + label: 'sort_desc', + detail: 'sort_desc(v instant-vector)', + documentation: 'Returns vector elements sorted by their sample values, in descending order.', + }, + { + insertText: 'sqrt()', + label: 'sqrt', + detail: 'sqrt(v instant-vector)', + documentation: 'Calculates the square root of all elements in `v`.', + }, + { + insertText: 'time()', + label: 'time', + detail: 'time()', + documentation: + 'Returns the number of seconds since January 1, 1970 UTC. Note that this does not actually return the current time, but the time at which the expression is to be evaluated.', + }, + { + insertText: 'vector()', + label: 'vector', + detail: 'vector(s scalar)', + documentation: 'Returns the scalar `s` as a vector with no labels.', + }, + { + insertText: 'year()', + label: 'year', + detail: 'year(v=vector(time()) instant-vector)', + documentation: 'Returns the year for each of the given times in UTC.', + }, + { + insertText: 'avg_over_time()', + label: 'avg_over_time', + detail: 'avg_over_time(range-vector)', + documentation: 'The average value of all points in the specified interval.', + }, + { + insertText: 'min_over_time()', + label: 'min_over_time', + detail: 'min_over_time(range-vector)', + documentation: 'The minimum value of all points in the specified interval.', + }, + { + insertText: 'max_over_time()', + label: 'max_over_time', + detail: 'max_over_time(range-vector)', + documentation: 'The maximum value of all points in the specified interval.', + }, + { + insertText: 'sum_over_time()', + label: 'sum_over_time', + detail: 'sum_over_time(range-vector)', + documentation: 'The sum of all values in the specified interval.', + }, + { + insertText: 'count_over_time()', 
+ label: 'count_over_time', + detail: 'count_over_time(range-vector)', + documentation: 'The count of all values in the specified interval.', + }, + { + insertText: 'quantile_over_time()', + label: 'quantile_over_time', + detail: 'quantile_over_time(scalar, range-vector)', + documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.', + }, + { + insertText: 'stddev_over_time()', + label: 'stddev_over_time', + detail: 'stddev_over_time(range-vector)', + documentation: 'The population standard deviation of the values in the specified interval.', + }, + { + insertText: 'stdvar_over_time()', + label: 'stdvar_over_time', + detail: 'stdvar_over_time(range-vector)', + documentation: 'The population standard variance of the values in the specified interval.', + }, ]; const tokenizer = { @@ -93,7 +394,7 @@ const tokenizer = { }, }, }, - function: new RegExp(`\\b(?:${FUNCTIONS.join('|')})(?=\\s*\\()`, 'i'), + function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'), 'context-range': [ { pattern: /\[[^\]]*(?=])/, // [1m] diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss index 119c468292a48..10b2238f4b8d7 100644 --- a/public/sass/components/_slate_editor.scss +++ b/public/sass/components/_slate_editor.scss @@ -71,6 +71,7 @@ .typeahead-item-hint { font-size: $font-size-xs; color: $text-color; + white-space: normal; } } } From fc06f8bfe71d758148708dee23c52af678935a52 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 17:22:15 +0200 Subject: [PATCH 050/104] Pass more tests --- public/app/plugins/panel/singlestat/module.ts | 1 + .../panel/singlestat/specs/singlestat.jest.ts | 34 ++++++++----------- 2 files changed, 15 insertions(+), 20 deletions(-) diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index ebd2628b0864c..7fafb5902d138 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ 
b/public/app/plugins/panel/singlestat/module.ts @@ -310,6 +310,7 @@ class SingleStatCtrl extends MetricsPanelCtrl { data.valueRounded = data.value; data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { + console.log(lastPoint, lastValue); data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 2c945aa6eb239..7b89f86250c49 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -7,7 +7,7 @@ import moment from 'moment'; describe('SingleStatCtrl', function() { let ctx = {}; let epoch = 1505826363746; - let clock; + Date.now = () => epoch; let $scope = { $on: () => {}, @@ -24,7 +24,7 @@ describe('SingleStatCtrl', function() { }, }; SingleStatCtrl.prototype.dashboard = { - isTimezoneUtc: () => {}, + isTimezoneUtc: jest.fn(() => true), }; function singleStatScenario(desc, func) { @@ -89,29 +89,30 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsIso'; + ctx.ctrl.dashboard.isTimezoneUtc = () => false; }); it('Should use time instead of value', function() { - console.log(ctx.data.value); expect(ctx.data.value).toBe(1505634997920); expect(ctx.data.valueRounded).toBe(1505634997920); }); it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + expect(ctx.data.valueFormatted).toBe('2017-09-17 09:56:37'); }); }); singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.data = [{ target: 'test.cpu1', 
datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsIso'; // ctx.setIsUtc(true); + ctx.ctrl.dashboard.isTimezoneUtc = () => true; }); - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + it('should set value', function() { + expect(ctx.data.valueFormatted).toBe('1970-01-01 00:00:05'); }); }); @@ -120,6 +121,7 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsUS'; + ctx.ctrl.dashboard.isTimezoneUtc = () => false; }); it('Should use time instead of value', function() { @@ -134,21 +136,22 @@ describe('SingleStatCtrl', function() { singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsUS'; // ctx.setIsUtc(true); + ctx.ctrl.dashboard.isTimezoneUtc = () => true; }); it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + expect(ctx.data.valueFormatted).toBe('01/01/1970 12:00:05 am'); }); }); singleStatScenario('showing last time from now instead of value', function(ctx) { beforeEach(() => { // clock = sinon.useFakeTimers(epoch); - jest.useFakeTimers(); + //jest.useFakeTimers(); }); ctx.setup(function() { @@ -167,16 +170,11 @@ describe('SingleStatCtrl', function() { }); afterEach(() => { - jest.clearAllTimers(); + // jest.clearAllTimers(); }); }); singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { - beforeEach(() => { - // clock = sinon.useFakeTimers(epoch); 
- jest.useFakeTimers(); - }); - ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; @@ -187,10 +185,6 @@ describe('SingleStatCtrl', function() { it('should set formatted value', function() { expect(ctx.data.valueFormatted).toBe('2 days ago'); }); - - afterEach(() => { - jest.clearAllTimers(); - }); }); singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( From d42cea5d42c58175448986a8682b7a8c137be088 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:09:42 +0200 Subject: [PATCH 051/104] refactor sql engine to make it hold all common code for sql datasources --- pkg/tsdb/sql_engine.go | 324 +++++++++++++++++++++++++++++++++++------ 1 file changed, 279 insertions(+), 45 deletions(-) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index ec908aeb9de8b..9321e8912dc57 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -1,11 +1,17 @@ package tsdb import ( + "container/list" "context" + "database/sql" "fmt" + "math" + "strings" "sync" "time" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/components/null" "github.com/go-xorm/core" @@ -14,27 +20,15 @@ import ( "github.com/grafana/grafana/pkg/models" ) -// SqlEngine is a wrapper class around xorm for relational database data sources. -type SqlEngine interface { - InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error - Query( - ctx context.Context, - ds *models.DataSource, - query *TsdbQuery, - transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, - transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, - ) (*Response, error) -} - // SqlMacroEngine interpolates macros into sql. 
It takes in the Query to have access to query context and // timeRange to be able to generate queries that use from and to. type SqlMacroEngine interface { Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error) } -type DefaultSqlEngine struct { - MacroEngine SqlMacroEngine - XormEngine *xorm.Engine +// SqlTableRowTransformer transforms a query result row to RowValues with proper types. +type SqlTableRowTransformer interface { + Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error) } type engineCacheType struct { @@ -48,69 +42,92 @@ var engineCache = engineCacheType{ versions: make(map[int64]int), } -// InitEngine creates the db connection and inits the xorm engine or loads it from the engine cache -func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error { +var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) { + return xorm.NewEngine(driverName, connectionString) +} + +type sqlQueryEndpoint struct { + macroEngine SqlMacroEngine + rowTransformer SqlTableRowTransformer + engine *xorm.Engine + timeColumnNames []string + metricColumnTypes []string + log log.Logger +} + +type SqlQueryEndpointConfiguration struct { + DriverName string + Datasource *models.DataSource + ConnectionString string + TimeColumnNames []string + MetricColumnTypes []string +} + +var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) { + queryEndpoint := sqlQueryEndpoint{ + rowTransformer: rowTransformer, + macroEngine: macroEngine, + timeColumnNames: []string{"time"}, + log: log, + } + + if len(config.TimeColumnNames) > 0 { + queryEndpoint.timeColumnNames = config.TimeColumnNames + } + engineCache.Lock() defer engineCache.Unlock() - if engine, present := engineCache.cache[dsInfo.Id]; present { - if version := engineCache.versions[dsInfo.Id]; 
version == dsInfo.Version { - e.XormEngine = engine - return nil + if engine, present := engineCache.cache[config.Datasource.Id]; present { + if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version { + queryEndpoint.engine = engine + return &queryEndpoint, nil } } - engine, err := xorm.NewEngine(driverName, cnnstr) + engine, err := NewXormEngine(config.DriverName, config.ConnectionString) if err != nil { - return err + return nil, err } engine.SetMaxOpenConns(10) engine.SetMaxIdleConns(10) - engineCache.versions[dsInfo.Id] = dsInfo.Version - engineCache.cache[dsInfo.Id] = engine - e.XormEngine = engine + engineCache.versions[config.Datasource.Id] = config.Datasource.Version + engineCache.cache[config.Datasource.Id] = engine + queryEndpoint.engine = engine - return nil + return &queryEndpoint, nil } -// Query is a default implementation of the Query method for an SQL data source. -// The caller of this function must implement transformToTimeSeries and transformToTable and -// pass them in as parameters. 
-func (e *DefaultSqlEngine) Query( - ctx context.Context, - dsInfo *models.DataSource, - tsdbQuery *TsdbQuery, - transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, - transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, -) (*Response, error) { +// Query is the main function for the SqlQueryEndpoint +func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) { result := &Response{ Results: make(map[string]*QueryResult), } - session := e.XormEngine.NewSession() + session := e.engine.NewSession() defer session.Close() db := session.DB() for _, query := range tsdbQuery.Queries { - rawSql := query.Model.Get("rawSql").MustString() - if rawSql == "" { + rawSQL := query.Model.Get("rawSql").MustString() + if rawSQL == "" { continue } queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId} result.Results[query.RefId] = queryResult - rawSql, err := e.MacroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSql) + rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL) if err != nil { queryResult.Error = err continue } - queryResult.Meta.Set("sql", rawSql) + queryResult.Meta.Set("sql", rawSQL) - rows, err := db.Query(rawSql) + rows, err := db.Query(rawSQL) if err != nil { queryResult.Error = err continue @@ -122,13 +139,13 @@ func (e *DefaultSqlEngine) Query( switch format { case "time_series": - err := transformToTimeSeries(query, rows, queryResult, tsdbQuery) + err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery) if err != nil { queryResult.Error = err continue } case "table": - err := transformToTable(query, rows, queryResult, tsdbQuery) + err := e.transformToTable(query, rows, queryResult, tsdbQuery) if err != nil { queryResult.Error = err continue @@ -139,6 +156,223 @@ func (e *DefaultSqlEngine) Query( return result, nil } +func (e *sqlQueryEndpoint) 
transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error { + columnNames, err := rows.Columns() + columnCount := len(columnNames) + + if err != nil { + return err + } + + rowLimit := 1000000 + rowCount := 0 + timeIndex := -1 + + table := &Table{ + Columns: make([]TableColumn, columnCount), + Rows: make([]RowValues, 0), + } + + for i, name := range columnNames { + table.Columns[i].Text = name + + for _, tc := range e.timeColumnNames { + if name == tc { + timeIndex = i + break + } + } + } + + columnTypes, err := rows.ColumnTypes() + if err != nil { + return err + } + + for ; rows.Next(); rowCount++ { + if rowCount > rowLimit { + return fmt.Errorf("query row limit exceeded, limit %d", rowLimit) + } + + values, err := e.rowTransformer.Transform(columnTypes, rows) + if err != nil { + return err + } + + // converts column named time to unix timestamp in milliseconds + // to make native mssql datetime types and epoch dates work in + // annotation and table queries. 
+ ConvertSqlTimeColumnToEpochMs(values, timeIndex) + table.Rows = append(table.Rows, values) + } + + result.Tables = append(result.Tables, table) + result.Meta.Set("rowCount", rowCount) + return nil +} + +func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error { + pointsBySeries := make(map[string]*TimeSeries) + seriesByQueryOrder := list.New() + + columnNames, err := rows.Columns() + if err != nil { + return err + } + + columnTypes, err := rows.ColumnTypes() + if err != nil { + return err + } + + rowLimit := 1000000 + rowCount := 0 + timeIndex := -1 + metricIndex := -1 + + // check columns of resultset: a column named time is mandatory + // the first text column is treated as metric name unless a column named metric is present + for i, col := range columnNames { + for _, tc := range e.timeColumnNames { + if col == tc { + timeIndex = i + continue + } + } + switch col { + case "metric": + metricIndex = i + default: + if metricIndex == -1 { + columnType := columnTypes[i].DatabaseTypeName() + + for _, mct := range e.metricColumnTypes { + if columnType == mct { + metricIndex = i + continue + } + } + } + } + } + + if timeIndex == -1 { + return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or ")) + } + + fillMissing := query.Model.Get("fill").MustBool(false) + var fillInterval float64 + fillValue := null.Float{} + if fillMissing { + fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 + if !query.Model.Get("fillNull").MustBool(false) { + fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() + fillValue.Valid = true + } + } + + for rows.Next() { + var timestamp float64 + var value null.Float + var metric string + + if rowCount > rowLimit { + return fmt.Errorf("query row limit exceeded, limit %d", rowLimit) + } + + values, err := e.rowTransformer.Transform(columnTypes, rows) + if err != nil { + return err + } + + // converts column named time to unix 
timestamp in milliseconds to make + // native mysql datetime types and epoch dates work in + // annotation and table queries. + ConvertSqlTimeColumnToEpochMs(values, timeIndex) + + switch columnValue := values[timeIndex].(type) { + case int64: + timestamp = float64(columnValue) + case float64: + timestamp = columnValue + default: + return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) + } + + if metricIndex >= 0 { + if columnValue, ok := values[metricIndex].(string); ok { + metric = columnValue + } else { + return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex]) + } + } + + for i, col := range columnNames { + if i == timeIndex || i == metricIndex { + continue + } + + if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil { + return err + } + + if metricIndex == -1 { + metric = col + } + + series, exist := pointsBySeries[metric] + if !exist { + series = &TimeSeries{Name: metric} + pointsBySeries[metric] = series + seriesByQueryOrder.PushBack(metric) + } + + if fillMissing { + var intervalStart float64 + if !exist { + intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) + } else { + intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval + } + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + + for i := intervalStart; i < timestamp; i += fillInterval { + series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } + + series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)}) + + e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) + } + } + + for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { + 
key := elem.Value.(string) + result.Series = append(result.Series, pointsBySeries[key]) + + if fillMissing { + series := pointsBySeries[key] + // fill in values from last fetched value till interval end + intervalStart := series.Points[len(series.Points)-1][1].Float64 + intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { + series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } + } + + result.Meta.Set("rowCount", rowCount) + return nil +} + // ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds // to make native datetime types and epoch dates work in annotation and table queries. func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) { From 2f3851b915620040204919b17b603c5b07a7de1a Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:10:17 +0200 Subject: [PATCH 052/104] postgres: use new sql engine --- pkg/tsdb/postgres/macros.go | 38 ++-- pkg/tsdb/postgres/macros_test.go | 2 +- pkg/tsdb/postgres/postgres.go | 269 +++-------------------------- pkg/tsdb/postgres/postgres_test.go | 30 ++-- 4 files changed, 64 insertions(+), 275 deletions(-) diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 61e88418ff4b5..661dbf3d4cef7 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -14,18 +14,18 @@ import ( const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` -type PostgresMacroEngine struct { - TimeRange *tsdb.TimeRange - Query *tsdb.Query +type postgresMacroEngine struct { + timeRange *tsdb.TimeRange + query *tsdb.Query } -func NewPostgresMacroEngine() tsdb.SqlMacroEngine { - return &PostgresMacroEngine{} +func newPostgresMacroEngine() tsdb.SqlMacroEngine { + return 
&postgresMacroEngine{} } -func (m *PostgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - m.TimeRange = timeRange - m.Query = query +func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + m.timeRange = timeRange + m.query = query rExp, _ := regexp.Compile(sExpr) var macroError error @@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str return result + str[lastIndex:] } -func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__time": if len(args) == 0 { @@ -83,11 +83,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) @@ -97,16 +97,16 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, return "", fmt.Errorf("error parsing interval %v", args[1]) } if len(args) == 3 { - 
m.Query.Model.Set("fill", true) - m.Query.Model.Set("fillInterval", interval.Seconds()) + m.query.Model.Set("fill", true) + m.query.Model.Set("fillInterval", interval.Seconds()) if args[2] == "NULL" { - m.Query.Model.Set("fillNull", true) + m.query.Model.Set("fillNull", true) } else { floatVal, err := strconv.ParseFloat(args[2], 64) if err != nil { return "", fmt.Errorf("error parsing fill value %v", args[2]) } - m.Query.Model.Set("fillValue", floatVal) + m.query.Model.Set("fillValue", floatVal) } } return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil @@ -114,11 +114,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index 8c5818504306c..194573be0fd25 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -12,7 +12,7 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := NewPostgresMacroEngine() + engine := newPostgresMacroEngine() query := &tsdb.Query{} Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { diff --git 
a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index f19e4fb54f4e7..b9f333db127bc 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -1,46 +1,38 @@ package postgres import ( - "container/list" - "context" - "fmt" - "math" + "database/sql" "net/url" "strconv" "github.com/go-xorm/core" - "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" ) -type PostgresQueryEndpoint struct { - sqlEngine tsdb.SqlEngine - log log.Logger -} - func init() { - tsdb.RegisterTsdbQueryEndpoint("postgres", NewPostgresQueryEndpoint) + tsdb.RegisterTsdbQueryEndpoint("postgres", newPostgresQueryEndpoint) } -func NewPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - endpoint := &PostgresQueryEndpoint{ - log: log.New("tsdb.postgres"), - } - - endpoint.sqlEngine = &tsdb.DefaultSqlEngine{ - MacroEngine: NewPostgresMacroEngine(), - } +func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + logger := log.New("tsdb.postgres") cnnstr := generateConnectionString(datasource) - endpoint.log.Debug("getEngine", "connection", cnnstr) + logger.Debug("getEngine", "connection", cnnstr) - if err := endpoint.sqlEngine.InitEngine("postgres", datasource, cnnstr); err != nil { - return nil, err + config := tsdb.SqlQueryEndpointConfiguration{ + DriverName: "postgres", + ConnectionString: cnnstr, + Datasource: datasource, + MetricColumnTypes: []string{"UNKNOWN", "TEXT", "VARCHAR", "CHAR"}, } - return endpoint, nil + rowTransformer := postgresRowTransformer{ + log: logger, + } + + return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger) } func generateConnectionString(datasource *models.DataSource) string { @@ -63,70 +55,15 @@ func generateConnectionString(datasource *models.DataSource) string { return u.String() } -func (e *PostgresQueryEndpoint) 
Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) +type postgresRowTransformer struct { + log log.Logger } -func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - columnNames, err := rows.Columns() - if err != nil { - return err - } - - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, len(columnNames)), - Rows: make([]tsdb.RowValues, 0), - } - - for i, name := range columnNames { - table.Columns[i].Text = name - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - - // check if there is a column named time - for i, col := range columnNames { - switch col { - case "time": - timeIndex = i - } - } - - for ; rows.Next(); rowCount++ { - if rowCount > rowLimit { - return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native postgres datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) +func (t *postgresRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { + values := make([]interface{}, len(columnTypes)) + valuePtrs := make([]interface{}, len(columnTypes)) - table.Rows = append(table.Rows, values) - } - - result.Tables = append(result.Tables, table) - result.Meta.Set("rowCount", rowCount) - return nil -} - -func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) { - types, err := rows.ColumnTypes() - if err != nil { - return nil, err - } - - values := make([]interface{}, len(types)) - valuePtrs := make([]interface{}, len(types)) - - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { valuePtrs[i] = &values[i] } @@ -136,20 +73,20 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, // convert types not handled by lib/pq // unhandled types are returned as []byte - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { if value, ok := values[i].([]byte); ok { - switch types[i].DatabaseTypeName() { + switch columnTypes[i].DatabaseTypeName() { case "NUMERIC": if v, err := strconv.ParseFloat(string(value), 64); err == nil { values[i] = v } else { - e.log.Debug("Rows", "Error converting numeric to float", value) + t.log.Debug("Rows", "Error converting numeric to float", value) } case "UNKNOWN", "CIDR", "INET", "MACADDR": // char literals have type UNKNOWN values[i] = string(value) default: - e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value) + t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value) values[i] = string(value) } } @@ -157,159 +94,3 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, return values, nil } - -func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, 
tsdbQuery *tsdb.TsdbQuery) error { - pointsBySeries := make(map[string]*tsdb.TimeSeries) - seriesByQueryOrder := list.New() - - columnNames, err := rows.Columns() - if err != nil { - return err - } - - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - metricIndex := -1 - - // check columns of resultset: a column named time is mandatory - // the first text column is treated as metric name unless a column named metric is present - for i, col := range columnNames { - switch col { - case "time": - timeIndex = i - case "metric": - metricIndex = i - default: - if metricIndex == -1 { - switch columnTypes[i].DatabaseTypeName() { - case "UNKNOWN", "TEXT", "VARCHAR", "CHAR": - metricIndex = i - } - } - } - } - - if timeIndex == -1 { - return fmt.Errorf("Found no column named time") - } - - fillMissing := query.Model.Get("fill").MustBool(false) - var fillInterval float64 - fillValue := null.Float{} - if fillMissing { - fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if !query.Model.Get("fillNull").MustBool(false) { - fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() - fillValue.Valid = true - } - } - - for rows.Next() { - var timestamp float64 - var value null.Float - var metric string - - if rowCount > rowLimit { - return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - switch columnValue := values[timeIndex].(type) { - case int64: - timestamp = float64(columnValue) - case float64: - timestamp = columnValue - default: - return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) - } - - if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue - } else { - return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) - } - } - - for i, col := range columnNames { - if i == timeIndex || i == metricIndex { - continue - } - - if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { - return err - } - - if metricIndex == -1 { - metric = col - } - - series, exist := pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - - if fillMissing { - var intervalStart float64 - if !exist { - intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) - } else { - intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval - } - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - - for i := intervalStart; i < timestamp; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) - rowCount++ - - } - } - - for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { - key := elem.Value.(string) - result.Series = append(result.Series, pointsBySeries[key]) - - if fillMissing { - series := pointsBySeries[key] - // fill in values from last fetched value till interval 
end - intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - } - - result.Meta.Set("rowCount", rowCount) - return nil -} diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index a3a6d6546df53..089829bf5901f 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/go-xorm/xorm" + "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" @@ -22,8 +23,9 @@ import ( // The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest! // Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a // preconfigured Postgres server suitable for running these tests. -// There is also a dashboard.json in same directory that you can import to Grafana -// once you've created a datasource for the test server/database. +// There is also a datasource and dashboard provisioned by devenv scripts that you can +// use to verify that the generated data are vizualized as expected, see +// devenv/README.md for setup instructions. 
func TestPostgres(t *testing.T) { // change to true to run the MySQL tests runPostgresTests := false @@ -36,19 +38,25 @@ func TestPostgres(t *testing.T) { Convey("PostgreSQL", t, func() { x := InitPostgresTestDB(t) - endpoint := &PostgresQueryEndpoint{ - sqlEngine: &tsdb.DefaultSqlEngine{ - MacroEngine: NewPostgresMacroEngine(), - XormEngine: x, - }, - log: log.New("tsdb.postgres"), + origXormEngine := tsdb.NewXormEngine + tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) { + return x, nil } - sess := x.NewSession() - defer sess.Close() + endpoint, err := newPostgresQueryEndpoint(&models.DataSource{ + JsonData: simplejson.New(), + SecureJsonData: securejsondata.SecureJsonData{}, + }) + So(err, ShouldBeNil) + sess := x.NewSession() fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + Reset(func() { + sess.Close() + tsdb.NewXormEngine = origXormEngine + }) + Convey("Given a table with different native data types", func() { sql := ` DROP TABLE IF EXISTS postgres_types; From 27db4540125ae1c5d342319fade4043bc2221081 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:10:45 +0200 Subject: [PATCH 053/104] mysql: use new sql engine --- pkg/tsdb/mysql/macros.go | 38 ++--- pkg/tsdb/mysql/macros_test.go | 2 +- pkg/tsdb/mysql/mysql.go | 265 +++------------------------------- pkg/tsdb/mysql/mysql_test.go | 30 ++-- 4 files changed, 61 insertions(+), 274 deletions(-) diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 584f731f3b80d..078d1ff54f897 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -14,18 +14,18 @@ import ( const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` -type MySqlMacroEngine struct { - TimeRange *tsdb.TimeRange - Query *tsdb.Query +type mySqlMacroEngine struct { + timeRange *tsdb.TimeRange + query *tsdb.Query } -func NewMysqlMacroEngine() tsdb.SqlMacroEngine { - return &MySqlMacroEngine{} +func newMysqlMacroEngine() 
tsdb.SqlMacroEngine { + return &mySqlMacroEngine{} } -func (m *MySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - m.TimeRange = timeRange - m.Query = query +func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + m.timeRange = timeRange + m.query = query rExp, _ := regexp.Compile(sExpr) var macroError error @@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str return result + str[lastIndex:] } -func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__timeEpoch", "__time": if len(args) == 0 { @@ -78,11 +78,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -92,16 +92,16 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("error parsing interval %v", args[1]) } if len(args) == 3 { - 
m.Query.Model.Set("fill", true) - m.Query.Model.Set("fillInterval", interval.Seconds()) + m.query.Model.Set("fill", true) + m.query.Model.Set("fillInterval", interval.Seconds()) if args[2] == "NULL" { - m.Query.Model.Set("fillNull", true) + m.query.Model.Set("fillNull", true) } else { floatVal, err := strconv.ParseFloat(args[2], 64) if err != nil { return "", fmt.Errorf("error parsing fill value %v", args[2]) } - m.Query.Model.Set("fillValue", floatVal) + m.query.Model.Set("fillValue", floatVal) } } return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil @@ -109,11 +109,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index 2561661b38591..003af9a737fa9 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -12,7 +12,7 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &MySqlMacroEngine{} + engine := &mySqlMacroEngine{} query := &tsdb.Query{} Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index 
7eceaffdb09d4..645f6b49bbb10 100644 --- a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -1,39 +1,24 @@ package mysql import ( - "container/list" - "context" "database/sql" "fmt" - "math" "reflect" "strconv" "github.com/go-sql-driver/mysql" "github.com/go-xorm/core" - "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" ) -type MysqlQueryEndpoint struct { - sqlEngine tsdb.SqlEngine - log log.Logger -} - func init() { - tsdb.RegisterTsdbQueryEndpoint("mysql", NewMysqlQueryEndpoint) + tsdb.RegisterTsdbQueryEndpoint("mysql", newMysqlQueryEndpoint) } -func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - endpoint := &MysqlQueryEndpoint{ - log: log.New("tsdb.mysql"), - } - - endpoint.sqlEngine = &tsdb.DefaultSqlEngine{ - MacroEngine: NewMysqlMacroEngine(), - } +func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + logger := log.New("tsdb.mysql") cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true", datasource.User, @@ -42,85 +27,35 @@ func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin datasource.Url, datasource.Database, ) - endpoint.log.Debug("getEngine", "connection", cnnstr) + logger.Debug("getEngine", "connection", cnnstr) - if err := endpoint.sqlEngine.InitEngine("mysql", datasource, cnnstr); err != nil { - return nil, err + config := tsdb.SqlQueryEndpointConfiguration{ + DriverName: "mysql", + ConnectionString: cnnstr, + Datasource: datasource, + TimeColumnNames: []string{"time", "time_sec"}, + MetricColumnTypes: []string{"CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT"}, } - return endpoint, nil -} - -// Query is the main function for the MysqlExecutor -func (e *MysqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery 
*tsdb.TsdbQuery) (*tsdb.Response, error) { - return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) -} - -func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - columnNames, err := rows.Columns() - columnCount := len(columnNames) - - if err != nil { - return err + rowTransformer := mysqlRowTransformer{ + log: logger, } - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, columnCount), - Rows: make([]tsdb.RowValues, 0), - } - - for i, name := range columnNames { - table.Columns[i].Text = name - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - - // check if there is a column named time - for i, col := range columnNames { - switch col { - case "time", "time_sec": - timeIndex = i - } - } - - for ; rows.Next(); rowCount++ { - if rowCount > rowLimit { - return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - table.Rows = append(table.Rows, values) - } - - result.Tables = append(result.Tables, table) - result.Meta.Set("rowCount", rowCount) - return nil + return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(), logger) } -func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) { - types, err := rows.ColumnTypes() - if err != nil { - return nil, err - } +type mysqlRowTransformer struct { + log log.Logger +} - values := make([]interface{}, len(types)) +func (t *mysqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { + values := make([]interface{}, len(columnTypes)) for i := range values { - scanType := types[i].ScanType() + scanType := columnTypes[i].ScanType() values[i] = reflect.New(scanType).Interface() - if types[i].DatabaseTypeName() == "BIT" { + if columnTypes[i].DatabaseTypeName() == "BIT" { values[i] = new([]byte) } } @@ -129,7 +64,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er return nil, err } - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { typeName := reflect.ValueOf(values[i]).Type().String() switch typeName { @@ -158,7 +93,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er } } - if types[i].DatabaseTypeName() == "DECIMAL" { + if columnTypes[i].DatabaseTypeName() == "DECIMAL" { f, err := strconv.ParseFloat(values[i].(string), 64) if err == nil { @@ -171,159 +106,3 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er return values, nil } - -func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - pointsBySeries := make(map[string]*tsdb.TimeSeries) - seriesByQueryOrder := list.New() - - columnNames, err := rows.Columns() - if err != nil { - return err - } - - columnTypes, err := 
rows.ColumnTypes() - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - metricIndex := -1 - - // check columns of resultset: a column named time is mandatory - // the first text column is treated as metric name unless a column named metric is present - for i, col := range columnNames { - switch col { - case "time", "time_sec": - timeIndex = i - case "metric": - metricIndex = i - default: - if metricIndex == -1 { - switch columnTypes[i].DatabaseTypeName() { - case "CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT": - metricIndex = i - } - } - } - } - - if timeIndex == -1 { - return fmt.Errorf("Found no column named time or time_sec") - } - - fillMissing := query.Model.Get("fill").MustBool(false) - var fillInterval float64 - fillValue := null.Float{} - if fillMissing { - fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if !query.Model.Get("fillNull").MustBool(false) { - fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() - fillValue.Valid = true - } - } - - for rows.Next() { - var timestamp float64 - var value null.Float - var metric string - - if rowCount > rowLimit { - return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - switch columnValue := values[timeIndex].(type) { - case int64: - timestamp = float64(columnValue) - case float64: - timestamp = columnValue - default: - return fmt.Errorf("Invalid type for column time/time_sec, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) - } - - if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue - } else { - return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) - } - } - - for i, col := range columnNames { - if i == timeIndex || i == metricIndex { - continue - } - - if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { - return err - } - - if metricIndex == -1 { - metric = col - } - - series, exist := pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - - if fillMissing { - var intervalStart float64 - if !exist { - intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) - } else { - intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval - } - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - - for i := intervalStart; i < timestamp; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) - rowCount++ - - } - } - - for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { - key := elem.Value.(string) - result.Series = append(result.Series, pointsBySeries[key]) - - if fillMissing { - series := pointsBySeries[key] - // fill in values from last fetched value till 
interval end - intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - } - - result.Meta.Set("rowCount", rowCount) - return nil -} diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 850a37617e28c..3b4e283b726bf 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/go-xorm/xorm" + "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" @@ -21,8 +22,9 @@ import ( // The tests require a MySQL db named grafana_ds_tests and a user/password grafana/password // Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a // preconfigured MySQL server suitable for running these tests. -// There is also a dashboard.json in same directory that you can import to Grafana -// once you've created a datasource for the test server/database. +// There is also a datasource and dashboard provisioned by devenv scripts that you can +// use to verify that the generated data are vizualized as expected, see +// devenv/README.md for setup instructions. 
func TestMySQL(t *testing.T) { // change to true to run the MySQL tests runMySqlTests := false @@ -35,19 +37,25 @@ func TestMySQL(t *testing.T) { Convey("MySQL", t, func() { x := InitMySQLTestDB(t) - endpoint := &MysqlQueryEndpoint{ - sqlEngine: &tsdb.DefaultSqlEngine{ - MacroEngine: NewMysqlMacroEngine(), - XormEngine: x, - }, - log: log.New("tsdb.mysql"), + origXormEngine := tsdb.NewXormEngine + tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) { + return x, nil } - sess := x.NewSession() - defer sess.Close() + endpoint, err := newMysqlQueryEndpoint(&models.DataSource{ + JsonData: simplejson.New(), + SecureJsonData: securejsondata.SecureJsonData{}, + }) + So(err, ShouldBeNil) + sess := x.NewSession() fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC) + Reset(func() { + sess.Close() + tsdb.NewXormEngine = origXormEngine + }) + Convey("Given a table with different native data types", func() { if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists { So(err, ShouldBeNil) From 4f7882cda2b3443e473caf426a321841b223a8ab Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:11:10 +0200 Subject: [PATCH 054/104] mssql: use new sql engine --- pkg/tsdb/mssql/macros.go | 38 ++--- pkg/tsdb/mssql/macros_test.go | 2 +- pkg/tsdb/mssql/mssql.go | 268 ++++------------------------------ pkg/tsdb/mssql/mssql_test.go | 30 ++-- 4 files changed, 64 insertions(+), 274 deletions(-) diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index ad3d1edd5d713..2c16b5cb27f1d 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -14,18 +14,18 @@ import ( const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` -type MsSqlMacroEngine struct { - TimeRange *tsdb.TimeRange - Query *tsdb.Query +type msSqlMacroEngine struct { + timeRange *tsdb.TimeRange + query *tsdb.Query } -func NewMssqlMacroEngine() tsdb.SqlMacroEngine { - return &MsSqlMacroEngine{} +func newMssqlMacroEngine() 
tsdb.SqlMacroEngine { + return &msSqlMacroEngine{} } -func (m *MsSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - m.TimeRange = timeRange - m.Query = query +func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + m.timeRange = timeRange + m.query = query rExp, _ := regexp.Compile(sExpr) var macroError error @@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str return result + str[lastIndex:] } -func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__time": if len(args) == 0 { @@ -83,11 +83,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -97,16 +97,16 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("error parsing interval %v", args[1]) } if len(args) == 3 { - m.Query.Model.Set("fill", 
true) - m.Query.Model.Set("fillInterval", interval.Seconds()) + m.query.Model.Set("fill", true) + m.query.Model.Set("fillInterval", interval.Seconds()) if args[2] == "NULL" { - m.Query.Model.Set("fillNull", true) + m.query.Model.Set("fillNull", true) } else { floatVal, err := strconv.ParseFloat(args[2], 64) if err != nil { return "", fmt.Errorf("error parsing fill value %v", args[2]) } - m.Query.Model.Set("fillValue", floatVal) + m.query.Model.Set("fillValue", floatVal) } } return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil @@ -114,11 +114,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 49368fe363112..1895cd9944248 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -14,7 +14,7 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &MsSqlMacroEngine{} + engine := &msSqlMacroEngine{} query := &tsdb.Query{ Model: simplejson.New(), } diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index eb71259b46bea..72e57d03fa022 100644 --- a/pkg/tsdb/mssql/mssql.go 
+++ b/pkg/tsdb/mssql/mssql.go @@ -1,49 +1,40 @@ package mssql import ( - "container/list" - "context" "database/sql" "fmt" "strconv" "strings" - "math" - _ "github.com/denisenkom/go-mssqldb" "github.com/go-xorm/core" - "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" ) -type MssqlQueryEndpoint struct { - sqlEngine tsdb.SqlEngine - log log.Logger -} - func init() { - tsdb.RegisterTsdbQueryEndpoint("mssql", NewMssqlQueryEndpoint) + tsdb.RegisterTsdbQueryEndpoint("mssql", newMssqlQueryEndpoint) } -func NewMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - endpoint := &MssqlQueryEndpoint{ - log: log.New("tsdb.mssql"), - } - - endpoint.sqlEngine = &tsdb.DefaultSqlEngine{ - MacroEngine: NewMssqlMacroEngine(), - } +func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + logger := log.New("tsdb.mssql") cnnstr := generateConnectionString(datasource) - endpoint.log.Debug("getEngine", "connection", cnnstr) + logger.Debug("getEngine", "connection", cnnstr) - if err := endpoint.sqlEngine.InitEngine("mssql", datasource, cnnstr); err != nil { - return nil, err + config := tsdb.SqlQueryEndpointConfiguration{ + DriverName: "mssql", + ConnectionString: cnnstr, + Datasource: datasource, + MetricColumnTypes: []string{"VARCHAR", "CHAR", "NVARCHAR", "NCHAR"}, + } + + rowTransformer := mssqlRowTransformer{ + log: logger, } - return endpoint, nil + return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMssqlMacroEngine(), logger) } func generateConnectionString(datasource *models.DataSource) string { @@ -70,71 +61,16 @@ func generateConnectionString(datasource *models.DataSource) string { ) } -// Query is the main function for the MssqlQueryEndpoint -func (e *MssqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - return 
e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) -} - -func (e MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - columnNames, err := rows.Columns() - columnCount := len(columnNames) - - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, columnCount), - Rows: make([]tsdb.RowValues, 0), - } - - for i, name := range columnNames { - table.Columns[i].Text = name - - // check if there is a column named time - switch name { - case "time": - timeIndex = i - } - } - - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - - for ; rows.Next(); rowCount++ { - if rowCount > rowLimit { - return fmt.Errorf("MsSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(columnTypes, rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds - // to make native mssql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - table.Rows = append(table.Rows, values) - } - - result.Tables = append(result.Tables, table) - result.Meta.Set("rowCount", rowCount) - return nil +type mssqlRowTransformer struct { + log log.Logger } -func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { - values := make([]interface{}, len(types)) - valuePtrs := make([]interface{}, len(types)) +func (t *mssqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { + values := make([]interface{}, len(columnTypes)) + valuePtrs := make([]interface{}, len(columnTypes)) - for i, stype := range types { - e.log.Debug("type", "type", stype) + for i, stype := range columnTypes { + t.log.Debug("type", "type", stype) valuePtrs[i] = &values[i] } @@ -144,17 +80,17 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. // convert types not handled by denisenkom/go-mssqldb // unhandled types are returned as []byte - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { if value, ok := values[i].([]byte); ok { - switch types[i].DatabaseTypeName() { + switch columnTypes[i].DatabaseTypeName() { case "MONEY", "SMALLMONEY", "DECIMAL": if v, err := strconv.ParseFloat(string(value), 64); err == nil { values[i] = v } else { - e.log.Debug("Rows", "Error converting numeric to float", value) + t.log.Debug("Rows", "Error converting numeric to float", value) } default: - e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value) + t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value) values[i] = string(value) } } @@ -162,157 +98,3 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. 
return values, nil } - -func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - pointsBySeries := make(map[string]*tsdb.TimeSeries) - seriesByQueryOrder := list.New() - - columnNames, err := rows.Columns() - if err != nil { - return err - } - - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - metricIndex := -1 - - // check columns of resultset: a column named time is mandatory - // the first text column is treated as metric name unless a column named metric is present - for i, col := range columnNames { - switch col { - case "time": - timeIndex = i - case "metric": - metricIndex = i - default: - if metricIndex == -1 { - switch columnTypes[i].DatabaseTypeName() { - case "VARCHAR", "CHAR", "NVARCHAR", "NCHAR": - metricIndex = i - } - } - } - } - - if timeIndex == -1 { - return fmt.Errorf("Found no column named time") - } - - fillMissing := query.Model.Get("fill").MustBool(false) - var fillInterval float64 - fillValue := null.Float{} - if fillMissing { - fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if !query.Model.Get("fillNull").MustBool(false) { - fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() - fillValue.Valid = true - } - } - - for rows.Next() { - var timestamp float64 - var value null.Float - var metric string - - if rowCount > rowLimit { - return fmt.Errorf("MSSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(columnTypes, rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - switch columnValue := values[timeIndex].(type) { - case int64: - timestamp = float64(columnValue) - case float64: - timestamp = columnValue - default: - return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) - } - - if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue - } else { - return fmt.Errorf("Column metric must be of type CHAR, VARCHAR, NCHAR or NVARCHAR. metric column name: %s type: %s but datatype is %T", columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex]) - } - } - - for i, col := range columnNames { - if i == timeIndex || i == metricIndex { - continue - } - - if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { - return err - } - - if metricIndex == -1 { - metric = col - } - - series, exist := pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - - if fillMissing { - var intervalStart float64 - if !exist { - intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) - } else { - intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval - } - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - - for i := intervalStart; i < timestamp; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) - } - } - - for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { - key := elem.Value.(string) - result.Series = append(result.Series, pointsBySeries[key]) - - if fillMissing { - 
series := pointsBySeries[key] - // fill in values from last fetched value till interval end - intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - } - - result.Meta.Set("rowCount", rowCount) - return nil -} diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index db04d6d1f0230..86484cb9d5e4d 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/go-xorm/xorm" + "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" . "github.com/smartystreets/goconvey/convey" @@ -19,8 +20,9 @@ import ( // The tests require a MSSQL db named grafanatest and a user/password grafana/Password! // Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a // preconfigured MSSQL server suitable for running these tests. -// There is also a dashboard.json in same directory that you can import to Grafana -// once you've created a datasource for the test server/database. +// There is also a datasource and dashboard provisioned by devenv scripts that you can +// use to verify that the generated data are vizualized as expected, see +// devenv/README.md for setup instructions. // If needed, change the variable below to the IP address of the database. 
var serverIP = "localhost" @@ -28,19 +30,25 @@ func TestMSSQL(t *testing.T) { SkipConvey("MSSQL", t, func() { x := InitMSSQLTestDB(t) - endpoint := &MssqlQueryEndpoint{ - sqlEngine: &tsdb.DefaultSqlEngine{ - MacroEngine: NewMssqlMacroEngine(), - XormEngine: x, - }, - log: log.New("tsdb.mssql"), + origXormEngine := tsdb.NewXormEngine + tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) { + return x, nil } - sess := x.NewSession() - defer sess.Close() + endpoint, err := newMssqlQueryEndpoint(&models.DataSource{ + JsonData: simplejson.New(), + SecureJsonData: securejsondata.SecureJsonData{}, + }) + So(err, ShouldBeNil) + sess := x.NewSession() fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + Reset(func() { + sess.Close() + tsdb.NewXormEngine = origXormEngine + }) + Convey("Given a table with different native data types", func() { sql := ` IF OBJECT_ID('dbo.[mssql_types]', 'U') IS NOT NULL From 318b8c5a2346d60ede4fe2f01ffb0f665501709c Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:12:00 +0200 Subject: [PATCH 055/104] update devenv datasources and dashboards for sql datasources Removed dashboards from docker blocks --- devenv/datasources.yaml | 28 +++- .../datasource_tests_mssql_fakedata.json | 79 ++++------ .../datasource_tests_mssql_unittest.json | 142 ++++++++---------- .../datasource_tests_mysql_fakedata.json | 68 +++------ .../datasource_tests_mysql_unittest.json | 136 ++++++++--------- .../datasource_tests_postgres_fakedata.json | 88 +++++------ .../datasource_tests_postgres_unittest.json | 142 ++++++++---------- 7 files changed, 306 insertions(+), 377 deletions(-) rename docker/blocks/mssql/dashboard.json => devenv/dev-dashboards/datasource_tests_mssql_fakedata.json (92%) rename docker/blocks/mssql_tests/dashboard.json => devenv/dev-dashboards/datasource_tests_mssql_unittest.json (96%) rename docker/blocks/mysql/dashboard.json => devenv/dev-dashboards/datasource_tests_mysql_fakedata.json (92%) 
rename docker/blocks/mysql_tests/dashboard.json => devenv/dev-dashboards/datasource_tests_mysql_unittest.json (96%) rename docker/blocks/postgres/dashboard.json => devenv/dev-dashboards/datasource_tests_postgres_fakedata.json (91%) rename docker/blocks/postgres_tests/dashboard.json => devenv/dev-dashboards/datasource_tests_postgres_unittest.json (95%) diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index 241381097b126..a4e9bf0564104 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -51,12 +51,28 @@ datasources: user: grafana password: password + - name: gdev-mysql-ds-tests + type: mysql + url: localhost:3306 + database: grafana_ds_tests + user: grafana + password: password + - name: gdev-mssql type: mssql url: localhost:1433 database: grafana user: grafana - password: "Password!" + secureJsonData: + password: Password! + + - name: gdev-mssql-ds-tests + type: mssql + url: localhost:1433 + database: grafanatest + user: grafana + secureJsonData: + password: Password! 
- name: gdev-postgres type: postgres @@ -68,6 +84,16 @@ datasources: jsonData: sslmode: "disable" + - name: gdev-postgres-ds-tests + type: postgres + url: localhost:5432 + database: grafanadstest + user: grafanatest + secureJsonData: + password: grafanatest + jsonData: + sslmode: "disable" + - name: gdev-cloudwatch type: cloudwatch editable: true diff --git a/docker/blocks/mssql/dashboard.json b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json similarity index 92% rename from docker/blocks/mssql/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mssql_fakedata.json index ce9aa141a750b..4350b5e44a82b 100644 --- a/docker/blocks/mssql/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MSSQL", - "label": "MSSQL", - "description": "", - "type": "datasource", - "pluginId": "mssql", - "pluginName": "MSSQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mssql", - "name": "MSSQL", - "version": "1.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -52,8 +16,8 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1520976748896, + "id": 203, + "iteration": 1532618661457, "links": [], "panels": [ { @@ -63,7 +27,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fill": 2, "gridPos": { "h": 9, @@ -149,14 +113,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fill": 2, "gridPos": { "h": 18, @@ -234,14 +202,18 @@ "min": null, "show": 
true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fill": 2, "gridPos": { "h": 9, @@ -313,11 +285,15 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "columns": [], - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fontSize": "100%", "gridPos": { "h": 10, @@ -371,13 +347,13 @@ ], "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "mssql", "fake-data-gen"], "templating": { "list": [ { "allValue": null, "current": {}, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "hide": 0, "includeAll": false, "label": "Datacenter", @@ -387,6 +363,7 @@ "query": "SELECT DISTINCT datacenter FROM grafana_metric", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -397,7 +374,7 @@ { "allValue": null, "current": {}, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "hide": 0, "includeAll": true, "label": "Hostname", @@ -407,6 +384,7 @@ "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -499,6 +477,7 @@ ], "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -533,7 +512,7 @@ ] }, "timezone": "", - "title": "Grafana Fake Data Gen - MSSQL", + "title": "Datasource tests - MSSQL", "uid": "86Js1xRmk", - "version": 11 + "version": 1 } \ No newline at end of file diff --git a/docker/blocks/mssql_tests/dashboard.json b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json similarity index 96% rename from docker/blocks/mssql_tests/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mssql_unittest.json index 80994254093aa..5c8eb8243a302 100644 --- 
a/docker/blocks/mssql_tests/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MSSQL_TEST", - "label": "MSSQL Test", - "description": "", - "type": "datasource", - "pluginId": "mssql", - "pluginName": "Microsoft SQL Server" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mssql", - "name": "Microsoft SQL Server", - "version": "1.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -47,7 +11,7 @@ "type": "dashboard" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "#6ed0e0", @@ -59,7 +23,7 @@ "type": "tags" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "rgba(255, 96, 96, 1)", @@ -71,7 +35,7 @@ "type": "tags" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "#7eb26d", @@ -83,7 +47,7 @@ "type": "tags" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "#1f78c1", @@ -96,16 +60,17 @@ } ] }, + "description": "Run the mssql unit tests to generate the data backing this dashboard", "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523320861623, + "id": 35, + "iteration": 1532618879985, "links": [], "panels": [ { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 4, @@ -152,7 +117,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ 
-206,7 +171,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -260,7 +225,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -314,7 +279,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -371,7 +336,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -454,7 +419,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -537,7 +502,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -620,7 +585,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -703,7 +668,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -786,7 +751,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -869,7 +834,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -962,7 +927,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1065,7 +1030,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + 
"datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1158,7 +1123,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1243,7 +1208,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1336,7 +1301,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1421,7 +1386,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1514,7 +1479,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1599,7 +1564,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1686,7 +1651,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1773,7 +1738,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1867,7 +1832,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1954,7 +1919,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2048,7 +2013,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": 
"gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2135,7 +2100,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2229,7 +2194,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2316,7 +2281,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2410,7 +2375,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2496,22 +2461,44 @@ "refresh": false, "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "mssql"], "templating": { "list": [ { "allValue": "'ALL'", - "current": {}, - "datasource": "${DS_MSSQL_TEST}", + "current": { + "selected": true, + "tags": [], + "text": "All", + "value": "$__all" + }, + "datasource": "gdev-mssql-ds-tests", "hide": 0, "includeAll": true, "label": "Metric", "multi": false, "name": "metric", - "options": [], + "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, + { + "selected": false, + "text": "Metric A", + "value": "Metric A" + }, + { + "selected": false, + "text": "Metric B", + "value": "Metric B" + } + ], "query": "SELECT DISTINCT measurement FROM metric_values", - "refresh": 1, + "refresh": 0, "regex": "", + "skipUrlSync": false, "sort": 0, "tagValuesQuery": "", "tags": [], @@ -2564,6 +2551,7 @@ ], "query": "1s,10s,30s,1m,5m,10m", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -2598,7 +2586,7 @@ ] }, "timezone": "", - "title": "Microsoft SQL Server Data Source Test", + "title": "Datasource tests - MSSQL (unit test)", "uid": "GlAqcPgmz", "version": 58 } \ No newline at end of file diff --git 
a/docker/blocks/mysql/dashboard.json b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json similarity index 92% rename from docker/blocks/mysql/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mysql_fakedata.json index dba7847cc7289..cef8fd4783f8b 100644 --- a/docker/blocks/mysql/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MYSQL", - "label": "MySQL", - "description": "", - "type": "datasource", - "pluginId": "mysql", - "pluginName": "MySQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mysql", - "name": "MySQL", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -52,8 +16,8 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523372133566, + "id": 4, + "iteration": 1532620738041, "links": [], "panels": [ { @@ -63,7 +27,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fill": 2, "gridPos": { "h": 9, @@ -161,7 +125,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fill": 2, "gridPos": { "h": 18, @@ -251,7 +215,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fill": 2, "gridPos": { "h": 9, @@ -332,7 +296,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fontSize": "100%", "gridPos": { "h": 9, @@ -390,6 +354,7 @@ "schemaVersion": 16, "style": "dark", "tags": [ + "gdev", "fake-data-gen", "mysql" ], @@ -397,8 +362,11 @@ "list": [ { "allValue": null, - "current": {}, - "datasource": "${DS_MYSQL}", + "current": { 
+ "text": "America", + "value": "America" + }, + "datasource": "gdev-mysql", "hide": 0, "includeAll": false, "label": "Datacenter", @@ -408,6 +376,7 @@ "query": "SELECT DISTINCT datacenter FROM grafana_metric", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -417,8 +386,11 @@ }, { "allValue": null, - "current": {}, - "datasource": "${DS_MYSQL}", + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": "gdev-mysql", "hide": 0, "includeAll": true, "label": "Hostname", @@ -428,6 +400,7 @@ "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -520,6 +493,7 @@ ], "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -554,7 +528,7 @@ ] }, "timezone": "", - "title": "Grafana Fake Data Gen - MySQL", + "title": "Datasource tests - MySQL", "uid": "DGsCac3kz", "version": 8 } \ No newline at end of file diff --git a/docker/blocks/mysql_tests/dashboard.json b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json similarity index 96% rename from docker/blocks/mysql_tests/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mysql_unittest.json index 53f313315bdee..2c20969da122b 100644 --- a/docker/blocks/mysql_tests/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MYSQL_TEST", - "label": "MySQL TEST", - "description": "", - "type": "datasource", - "pluginId": "mysql", - "pluginName": "MySQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mysql", - "name": "MySQL", - "version": "5.0.0" - }, - { - "type": "panel", - "id": 
"table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -47,7 +11,7 @@ "type": "dashboard" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "#6ed0e0", @@ -59,7 +23,7 @@ "type": "tags" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "rgba(255, 96, 96, 1)", @@ -71,7 +35,7 @@ "type": "tags" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "#7eb26d", @@ -83,7 +47,7 @@ "type": "tags" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "#1f78c1", @@ -96,16 +60,17 @@ } ] }, + "description": "Run the mysql unit tests to generate the data backing this dashboard", "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523320712115, + "id": 39, + "iteration": 1532620354037, "links": [], "panels": [ { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 4, @@ -152,7 +117,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -206,7 +171,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -260,7 +225,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -314,7 +279,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -371,7 +336,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ 
-454,7 +419,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -537,7 +502,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -620,7 +585,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -703,7 +668,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -786,7 +751,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -869,7 +834,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -962,7 +927,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1059,7 +1024,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1152,7 +1117,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1237,7 +1202,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1330,7 +1295,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1415,7 +1380,7 @@ "bars": false, "dashLength": 
10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1508,7 +1473,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1593,7 +1558,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1687,7 +1652,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1774,7 +1739,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1868,7 +1833,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1955,7 +1920,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2049,7 +2014,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2136,7 +2101,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2230,7 +2195,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2316,22 +2281,42 @@ "refresh": false, "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "mysql"], "templating": { "list": [ { "allValue": "", - "current": {}, - "datasource": "${DS_MYSQL_TEST}", + "current": { + "text": "All", + "value": 
"$__all" + }, + "datasource": "gdev-mysql-ds-tests", "hide": 0, "includeAll": true, "label": "Metric", "multi": true, "name": "metric", - "options": [], + "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, + { + "selected": false, + "text": "Metric A", + "value": "Metric A" + }, + { + "selected": false, + "text": "Metric B", + "value": "Metric B" + } + ], "query": "SELECT DISTINCT measurement FROM metric_values", - "refresh": 1, + "refresh": 0, "regex": "", + "skipUrlSync": false, "sort": 0, "tagValuesQuery": "", "tags": [], @@ -2384,6 +2369,7 @@ ], "query": "1s,10s,30s,1m,5m,10m", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -2418,7 +2404,7 @@ ] }, "timezone": "", - "title": "MySQL Data Source Test", + "title": "Datasource tests - MySQL (unittest)", "uid": "Hmf8FDkmz", "version": 12 } \ No newline at end of file diff --git a/docker/blocks/postgres/dashboard.json b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json similarity index 91% rename from docker/blocks/postgres/dashboard.json rename to devenv/dev-dashboards/datasource_tests_postgres_fakedata.json index 77b0ceac62442..1afa6e25df86c 100644 --- a/docker/blocks/postgres/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_POSTGRESQL", - "label": "PostgreSQL", - "description": "", - "type": "datasource", - "pluginId": "postgres", - "pluginName": "PostgreSQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "" - }, - { - "type": "datasource", - "id": "postgres", - "name": "PostgreSQL", - "version": "1.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "" - } - ], "annotations": { "list": [ { @@ -52,8 +16,8 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1518601837383, 
+ "id": 5, + "iteration": 1532620601931, "links": [], "panels": [ { @@ -63,7 +27,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fill": 2, "gridPos": { "h": 9, @@ -150,14 +114,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fill": 2, "gridPos": { "h": 18, @@ -236,14 +204,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fill": 2, "gridPos": { "h": 9, @@ -316,11 +288,15 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "columns": [], - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fontSize": "100%", "gridPos": { "h": 9, @@ -377,6 +353,7 @@ "schemaVersion": 16, "style": "dark", "tags": [ + "gdev", "fake-data-gen", "postgres" ], @@ -384,8 +361,11 @@ "list": [ { "allValue": null, - "current": {}, - "datasource": "${DS_POSTGRESQL}", + "current": { + "text": "America", + "value": "America" + }, + "datasource": "gdev-postgres", "hide": 0, "includeAll": false, "label": "Datacenter", @@ -395,6 +375,7 @@ "query": "SELECT DISTINCT datacenter FROM grafana_metric", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -404,8 +385,11 @@ }, { "allValue": null, - "current": {}, - "datasource": "${DS_POSTGRESQL}", + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": "gdev-postgres", "hide": 0, "includeAll": true, "label": "Hostname", @@ -415,6 +399,7 @@ "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", "refresh": 1, "regex": "", + "skipUrlSync": false, 
"sort": 1, "tagValuesQuery": "", "tags": [], @@ -507,6 +492,7 @@ ], "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -541,7 +527,7 @@ ] }, "timezone": "", - "title": "Grafana Fake Data Gen - PostgreSQL", + "title": "Datasource tests - Postgres", "uid": "JYola5qzz", - "version": 1 + "version": 4 } \ No newline at end of file diff --git a/docker/blocks/postgres_tests/dashboard.json b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json similarity index 95% rename from docker/blocks/postgres_tests/dashboard.json rename to devenv/dev-dashboards/datasource_tests_postgres_unittest.json index 9efbe90bdfec6..d7d5f238e85f5 100644 --- a/docker/blocks/postgres_tests/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_POSTGRES_TEST", - "label": "Postgres TEST", - "description": "", - "type": "datasource", - "pluginId": "postgres", - "pluginName": "PostgreSQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "postgres", - "name": "PostgreSQL", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -47,7 +11,7 @@ "type": "dashboard" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "#6ed0e0", @@ -59,7 +23,7 @@ "type": "tags" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "rgba(255, 96, 96, 1)", @@ -71,7 +35,7 @@ "type": "tags" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "#7eb26d", @@ -83,7 
+47,7 @@ "type": "tags" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "#1f78c1", @@ -96,16 +60,17 @@ } ] }, + "description": "Run the postgres unit tests to generate the data backing this dashboard", "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523320929325, + "id": 38, + "iteration": 1532619575136, "links": [], "panels": [ { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 4, @@ -152,7 +117,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -206,7 +171,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -260,7 +225,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -314,7 +279,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -371,7 +336,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -454,7 +419,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -537,7 +502,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -620,7 +585,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -703,7 +668,7 @@ "bars": 
true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -786,7 +751,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -869,7 +834,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -962,7 +927,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1047,7 +1012,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1140,7 +1105,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1225,7 +1190,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1318,7 +1283,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1403,7 +1368,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1496,7 +1461,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1581,7 +1546,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ 
-1675,7 +1640,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1762,7 +1727,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1856,7 +1821,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1943,7 +1908,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2037,7 +2002,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2124,7 +2089,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2218,7 +2183,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2304,22 +2269,46 @@ "refresh": false, "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "postgres"], "templating": { "list": [ { "allValue": null, - "current": {}, - "datasource": "${DS_POSTGRES_TEST}", + "current": { + "selected": true, + "tags": [], + "text": "All", + "value": [ + "$__all" + ] + }, + "datasource": "gdev-postgres-ds-tests", "hide": 0, "includeAll": true, "label": "Metric", "multi": true, "name": "metric", - "options": [], + "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, + { + "selected": false, + "text": "Metric A", + "value": "Metric A" + }, + { + "selected": false, + "text": "Metric B", + "value": "Metric B" + } + ], "query": 
"SELECT DISTINCT measurement FROM metric_values", - "refresh": 1, + "refresh": 0, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -2372,6 +2361,7 @@ ], "query": "1s,10s,30s,1m,5m,10m", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -2406,7 +2396,7 @@ ] }, "timezone": "", - "title": "Postgres Data Source Test", + "title": "Datasource tests - Postgres (unittest)", "uid": "vHQdlVziz", - "version": 14 + "version": 17 } \ No newline at end of file From ab8fa0de7443136afeab82fcf8713fddbdc23a48 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 21:39:02 +0200 Subject: [PATCH 056/104] elasticsearch: support reversed index patterns Now both [index-]pattern and pattern[-index] are supported --- .../elasticsearch/client/index_pattern.go | 35 ++++++++++++++----- .../client/index_pattern_test.go | 27 +++++++++++++- 2 files changed, 53 insertions(+), 9 deletions(-) diff --git a/pkg/tsdb/elasticsearch/client/index_pattern.go b/pkg/tsdb/elasticsearch/client/index_pattern.go index 8391e902ea478..952b5c4f80668 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern.go @@ -248,13 +248,28 @@ var datePatternReplacements = map[string]string{ func formatDate(t time.Time, pattern string) string { var datePattern string - parts := strings.Split(strings.TrimLeft(pattern, "["), "]") - base := parts[0] - if len(parts) == 2 { - datePattern = parts[1] - } else { - datePattern = base - base = "" + base := "" + ltr := false + + if strings.HasPrefix(pattern, "[") { + parts := strings.Split(strings.TrimLeft(pattern, "["), "]") + base = parts[0] + if len(parts) == 2 { + datePattern = parts[1] + } else { + datePattern = base + base = "" + } + ltr = true + } else if strings.HasSuffix(pattern, "]") { + parts := strings.Split(strings.TrimRight(pattern, "]"), "[") + datePattern = parts[0] + if len(parts) == 2 { + base = parts[1] + } else { + base = "" + } + ltr = false } 
formatted := t.Format(patternToLayout(datePattern)) @@ -293,7 +308,11 @@ func formatDate(t time.Time, pattern string) string { formatted = strings.Replace(formatted, "", fmt.Sprintf("%d", t.Hour()), -1) } - return base + formatted + if ltr { + return base + formatted + } + + return formatted + base } func patternToLayout(pattern string) string { diff --git a/pkg/tsdb/elasticsearch/client/index_pattern_test.go b/pkg/tsdb/elasticsearch/client/index_pattern_test.go index 3bd823d8c87a8..ca20b39d5328b 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern_test.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern_test.go @@ -28,29 +28,54 @@ func TestIndexPattern(t *testing.T) { to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) { - //So(indices, ShouldHaveLength, 1) + So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15.17") }) + indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15.17-data") + }) + indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15") }) + indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15-data") + }) + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.20") }) + indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], 
ShouldEqual, "2018.20-data") + }) + indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05") }) + indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05-data") + }) + indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018") }) + + indexPatternScenario(intervalYearly, "YYYY[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018-data") + }) }) Convey("Hourly interval", t, func() { From 48e5e65c73eea000bf2b702b8743de0146e29f86 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 10:33:06 +0200 Subject: [PATCH 057/104] changelog: add notes about closing #12731 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6409f094f6573..ad1b63234e9f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) +* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731) # 5.2.2 (2018-07-25) From 675a031b6c9c367fe27de5e839c1d919ca09021d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:04:01 +0200 Subject: [PATCH 058/104] All except 
one passing --- public/app/plugins/panel/singlestat/module.ts | 5 ++++- public/app/plugins/panel/singlestat/specs/singlestat.jest.ts | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index 7fafb5902d138..b63182141c193 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -310,11 +310,14 @@ class SingleStatCtrl extends MetricsPanelCtrl { data.valueRounded = data.value; data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { - console.log(lastPoint, lastValue); + // console.log(lastPoint, lastValue); + // console.log(this.panel.valueName); + // console.log(this.panel); data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; let decimalInfo = this.getDecimalsForValue(data.value); + console.log(decimalInfo); let formatFunc = kbn.valueFormats[this.panel.format]; data.valueFormatted = formatFunc(data.value, decimalInfo.decimals, decimalInfo.scaledDecimals); data.valueRounded = kbn.roundValue(data.value, decimalInfo.decimals); diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 7b89f86250c49..798298415a9c4 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -192,6 +192,8 @@ describe('SingleStatCtrl', function() { ) { ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }]; + ctx.ctrl.panel.valueName = 'avg'; + ctx.ctrl.panel.format = 'none'; }); it('Should be rounded', function() { @@ -259,7 +261,9 @@ describe('SingleStatCtrl', function() { singleStatScenario('with default values', function(ctx) { ctx.setup(function() { ctx.data = tableData; + ctx.ctrl.panel = {}; ctx.ctrl.panel.tableColumn = 'mean'; + 
ctx.ctrl.panel.format = 'none'; }); it('Should use first rows value as default main value', function() { From 47da3e3ae83f36207cedfa26e9b5d51ca21b112f Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:28:16 +0200 Subject: [PATCH 059/104] All tests passing --- public/app/plugins/panel/singlestat/module.ts | 4 ---- public/app/plugins/panel/singlestat/specs/singlestat.jest.ts | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index b63182141c193..ebd2628b0864c 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -310,14 +310,10 @@ class SingleStatCtrl extends MetricsPanelCtrl { data.valueRounded = data.value; data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { - // console.log(lastPoint, lastValue); - // console.log(this.panel.valueName); - // console.log(this.panel); data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; let decimalInfo = this.getDecimalsForValue(data.value); - console.log(decimalInfo); let formatFunc = kbn.valueFormats[this.panel.format]; data.valueFormatted = formatFunc(data.value, decimalInfo.decimals, decimalInfo.scaledDecimals); data.valueRounded = kbn.roundValue(data.value, decimalInfo.decimals); diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 798298415a9c4..552ac2412d659 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -293,6 +293,7 @@ describe('SingleStatCtrl', function() { ctx.setup(function() { ctx.data = tableData; ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2']; + ctx.ctrl.panel.mappingType = 0; ctx.ctrl.panel.tableColumn = 'mean'; }); @@ -310,6 +311,7 @@ 
describe('SingleStatCtrl', function() { ctx.setup(function() { ctx.data = tableData; ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2']; + ctx.ctrl.panel.mappingType = 2; ctx.ctrl.panel.tableColumn = 'mean'; ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; }); From 3d21e42aac715c28fe3325bd3ce9f7a00cb39312 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:30:37 +0200 Subject: [PATCH 060/104] Remove Karma file --- .../singlestat/specs/singlestat_specs.ts | 362 ------------------ 1 file changed, 362 deletions(-) delete mode 100644 public/app/plugins/panel/singlestat/specs/singlestat_specs.ts diff --git a/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts b/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts deleted file mode 100644 index 217ec5ee04c22..0000000000000 --- a/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts +++ /dev/null @@ -1,362 +0,0 @@ -import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import helpers from 'test/specs/helpers'; -import { SingleStatCtrl } from '../module'; -import moment from 'moment'; - -describe('SingleStatCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - var epoch = 1505826363746; - var clock; - - function singleStatScenario(desc, func) { - describe(desc, function() { - ctx.setup = function(setupFunc) { - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach(ctx.createPanelController(SingleStatCtrl)); - - beforeEach(function() { - setupFunc(); - ctx.ctrl.onDataReceived(ctx.data); - ctx.data = ctx.ctrl.data; - }); - }; - - func(ctx); - }); - } - - singleStatScenario('with defaults', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 
'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; - }); - - it('Should use series avg as default main value', function() { - expect(ctx.data.value).to.be(15); - expect(ctx.data.valueRounded).to.be(15); - }); - - it('should set formatted falue', function() { - expect(ctx.data.valueFormatted).to.be('15'); - }); - }); - - singleStatScenario('showing serie name instead of value', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; - ctx.ctrl.panel.valueName = 'name'; - }); - - it('Should use series avg as default main value', function() { - expect(ctx.data.value).to.be(0); - expect(ctx.data.valueRounded).to.be(0); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('test.cpu1'); - }); - }); - - singleStatScenario('showing last iso time instead of value', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsIso'; - }); - - it('Should use time instead of value', function() { - expect(ctx.data.value).to.be(1505634997920); - expect(ctx.data.valueRounded).to.be(1505634997920); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); - }); - }); - - singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsIso'; - ctx.setIsUtc(true); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); - }); - }); - - singleStatScenario('showing last us time instead of value', function(ctx) { - ctx.setup(function() { - ctx.data = [{ 
target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsUS'; - }); - - it('Should use time instead of value', function() { - expect(ctx.data.value).to.be(1505634997920); - expect(ctx.data.valueRounded).to.be(1505634997920); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a')); - }); - }); - - singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsUS'; - ctx.setIsUtc(true); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); - }); - }); - - singleStatScenario('showing last time from now instead of value', function(ctx) { - beforeEach(() => { - clock = sinon.useFakeTimers(epoch); - }); - - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeFromNow'; - }); - - it('Should use time instead of value', function() { - expect(ctx.data.value).to.be(1505634997920); - expect(ctx.data.valueRounded).to.be(1505634997920); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('2 days ago'); - }); - - afterEach(() => { - clock.restore(); - }); - }); - - singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { - beforeEach(() => { - clock = sinon.useFakeTimers(epoch); - }); - - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeFromNow'; - 
ctx.setIsUtc(true); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('2 days ago'); - }); - - afterEach(() => { - clock.restore(); - }); - }); - - singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( - ctx - ) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }]; - }); - - it('Should be rounded', function() { - expect(ctx.data.value).to.be(99.999495); - expect(ctx.data.valueRounded).to.be(100); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('100'); - }); - }); - - singleStatScenario('When value to text mapping is specified', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[9.9, 1]] }]; - ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; - }); - - it('value should remain', function() { - expect(ctx.data.value).to.be(9.9); - }); - - it('round should be rounded up', function() { - expect(ctx.data.valueRounded).to.be(10); - }); - - it('Should replace value with text', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for first range', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[41, 50]] }]; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; - }); - - it('Should replace value with text OK', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[65, 75]] }]; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' 
}]; - }); - - it('Should replace value with text NOT OK', function() { - expect(ctx.data.valueFormatted).to.be('NOT OK'); - }); - }); - - describe('When table data', function() { - const tableData = [ - { - columns: [{ text: 'Time', type: 'time' }, { text: 'test1' }, { text: 'mean' }, { text: 'test2' }], - rows: [[1492759673649, 'ignore1', 15, 'ignore2']], - type: 'table', - }, - ]; - - singleStatScenario('with default values', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.ctrl.panel.tableColumn = 'mean'; - }); - - it('Should use first rows value as default main value', function() { - expect(ctx.data.value).to.be(15); - expect(ctx.data.valueRounded).to.be(15); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('15'); - }); - }); - - singleStatScenario('When table data has multiple columns', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.ctrl.panel.tableColumn = ''; - }); - - it('Should set column to first column that is not time', function() { - expect(ctx.ctrl.panel.tableColumn).to.be('test1'); - }); - }); - - singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( - ctx - ) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - }); - - it('Should be rounded', function() { - expect(ctx.data.value).to.be(99.99999); - expect(ctx.data.valueRounded).to.be(100); - }); - - it('should set formatted falue', function() { - expect(ctx.data.valueFormatted).to.be('100'); - }); - }); - - singleStatScenario('When value to text mapping is specified', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; - }); - - it('value should remain', 
function() { - expect(ctx.data.value).to.be(9.9); - }); - - it('round should be rounded up', function() { - expect(ctx.data.valueRounded).to.be(10); - }); - - it('Should replace value with text', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for first range', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 41, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; - }); - - it('Should replace value with text OK', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; - }); - - it('Should replace value with text NOT OK', function() { - expect(ctx.data.valueFormatted).to.be('NOT OK'); - }); - }); - - singleStatScenario('When value is string', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'test1'; - }); - - it('Should replace value with text NOT OK', function() { - expect(ctx.data.valueFormatted).to.be('ignore1'); - }); - }); - - singleStatScenario('When value is zero', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 0, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - }); - - it('Should return zero', function() { - expect(ctx.data.value).to.be(0); - }); - }); - }); -}); From 
bff7a293562125dc8423919f23a871d7141fa189 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:34:14 +0200 Subject: [PATCH 061/104] Cleanup --- .../panel/singlestat/specs/singlestat.jest.ts | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 552ac2412d659..7e8915ca5375e 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -1,6 +1,3 @@ -// import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -// import helpers from 'test/specs/helpers'; import { SingleStatCtrl } from '../module'; import moment from 'moment'; @@ -30,17 +27,6 @@ describe('SingleStatCtrl', function() { function singleStatScenario(desc, func) { describe(desc, function() { ctx.setup = function(setupFunc) { - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase()); - // beforeEach(ctx.createPanelController(SingleStatCtrl)); - beforeEach(function() { ctx.ctrl = new SingleStatCtrl($scope, $injector, {}); setupFunc(); @@ -107,7 +93,6 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsIso'; - // ctx.setIsUtc(true); ctx.ctrl.dashboard.isTimezoneUtc = () => true; }); @@ -139,7 +124,6 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsUS'; - // ctx.setIsUtc(true); ctx.ctrl.dashboard.isTimezoneUtc = 
() => true; }); @@ -149,11 +133,6 @@ describe('SingleStatCtrl', function() { }); singleStatScenario('showing last time from now instead of value', function(ctx) { - beforeEach(() => { - // clock = sinon.useFakeTimers(epoch); - //jest.useFakeTimers(); - }); - ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; @@ -168,10 +147,6 @@ describe('SingleStatCtrl', function() { it('should set formatted value', function() { expect(ctx.data.valueFormatted).toBe('2 days ago'); }); - - afterEach(() => { - // jest.clearAllTimers(); - }); }); singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { @@ -179,7 +154,6 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeFromNow'; - // ctx.setIsUtc(true); }); it('should set formatted value', function() { From e43feb7bfa0551125f82dbcf6503564227f091a1 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 13:21:40 +0200 Subject: [PATCH 062/104] use const for rowlimit in sql engine --- pkg/tsdb/sql_engine.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 9321e8912dc57..27ed37923a36d 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -100,6 +100,8 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo return &queryEndpoint, nil } +const rowLimit = 1000000 + // Query is the main function for the SqlQueryEndpoint func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) { result := &Response{ @@ -164,7 +166,6 @@ func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, resul return err } - rowLimit := 1000000 rowCount := 0 timeIndex := -1 @@ -225,7 
+226,6 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, return err } - rowLimit := 1000000 rowCount := 0 timeIndex := -1 metricIndex := -1 From 67c613a45a3ab3b15b587e6999e83a63d52a1582 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 13:29:57 +0200 Subject: [PATCH 063/104] Begin conversion --- public/app/core/specs/backend_srv.jest.ts | 39 +++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 public/app/core/specs/backend_srv.jest.ts diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts new file mode 100644 index 0000000000000..6281f3814ce6f --- /dev/null +++ b/public/app/core/specs/backend_srv.jest.ts @@ -0,0 +1,39 @@ +import { BackendSrv } from 'app/core/services/backend_srv'; +jest.mock('app/core/store'); + +describe('backend_srv', function() { + let _httpBackend = options => { + if (options.method === 'GET' && options.url === 'gateway-error') { + return Promise.reject({ status: 502 }); + } else if (options.method === 'POST') { + // return Promise.resolve({}); + } + return Promise.resolve({}); + }; + + let _backendSrv = new BackendSrv(_httpBackend, {}, {}, {}, {}); + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.inject(function($httpBackend, $http, backendSrv) { + // _httpBackend = $httpBackend; + // _backendSrv = backendSrv; + // }) + // ); + + describe('when handling errors', function() { + it('should return the http status code', function(done) { + // _httpBackend.whenGET('gateway-error').respond(502); + _backendSrv + .datasourceRequest({ + url: 'gateway-error', + }) + .catch(function(err) { + expect(err.status).toBe(502); + done(); + }); + // _httpBackend.flush(); + }); + }); +}); From b4ac3f2379e675439f571c308eb36581d4a39984 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 13:33:50 +0200 Subject: [PATCH 064/104] 
update devenv datasources and dashboards for sql datasources --- devenv/dev-dashboards/datasource_tests_mssql_fakedata.json | 1 - devenv/dev-dashboards/datasource_tests_mssql_unittest.json | 1 - devenv/dev-dashboards/datasource_tests_mysql_fakedata.json | 1 - devenv/dev-dashboards/datasource_tests_mysql_unittest.json | 1 - devenv/dev-dashboards/datasource_tests_postgres_fakedata.json | 1 - devenv/dev-dashboards/datasource_tests_postgres_unittest.json | 1 - 6 files changed, 6 deletions(-) diff --git a/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json index 4350b5e44a82b..e810a686134bc 100644 --- a/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json @@ -16,7 +16,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 203, "iteration": 1532618661457, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json index 5c8eb8243a302..d47cfb0ad6ea3 100644 --- a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json @@ -64,7 +64,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 35, "iteration": 1532618879985, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json index cef8fd4783f8b..ebeb452fc4c79 100644 --- a/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json @@ -16,7 +16,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 4, "iteration": 1532620738041, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json index 2c20969da122b..326114ec8ff6d 100644 --- 
a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json @@ -64,7 +64,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 39, "iteration": 1532620354037, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json index 1afa6e25df86c..508cae86bc3a4 100644 --- a/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json @@ -16,7 +16,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 5, "iteration": 1532620601931, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json index d7d5f238e85f5..85151089b7f3d 100644 --- a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json @@ -64,7 +64,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 38, "iteration": 1532619575136, "links": [], "panels": [ From 55111c801fbdc74687d74136dc73daf2aa29131c Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 13:41:07 +0200 Subject: [PATCH 065/104] Update test for local time --- .../plugins/panel/singlestat/specs/singlestat.jest.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 7e8915ca5375e..dd02b5c169c5a 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -23,6 +23,9 @@ describe('SingleStatCtrl', function() { SingleStatCtrl.prototype.dashboard = { isTimezoneUtc: jest.fn(() => true), }; + SingleStatCtrl.prototype.events = { + on: () => {}, + }; function 
singleStatScenario(desc, func) { describe(desc, function() { @@ -84,7 +87,7 @@ describe('SingleStatCtrl', function() { }); it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe('2017-09-17 09:56:37'); + expect(moment(ctx.data.valueFormatted).isSame('2017-09-17 09:56:37')).toBe(true); }); }); @@ -235,7 +238,9 @@ describe('SingleStatCtrl', function() { singleStatScenario('with default values', function(ctx) { ctx.setup(function() { ctx.data = tableData; - ctx.ctrl.panel = {}; + ctx.ctrl.panel = { + emit: () => {}, + }; ctx.ctrl.panel.tableColumn = 'mean'; ctx.ctrl.panel.format = 'none'; }); From 971e52ecc98126788066f0452aeaa7bf93f7baf2 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Fri, 27 Jul 2018 13:48:14 +0200 Subject: [PATCH 066/104] removed unused class from the deletebutton pr --- public/app/containers/Teams/TeamList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx index b86763d879985..31406250cb3f6 100644 --- a/public/app/containers/Teams/TeamList.tsx +++ b/public/app/containers/Teams/TeamList.tsx @@ -88,7 +88,7 @@ export class TeamList extends React.Component { -
    +
    - this.deleteTeam(team)} /> + this.deleteTeam(team)} />
    From 4e6168f3a331e5701e279305774413eca87499d4 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 14:22:48 +0200 Subject: [PATCH 067/104] Add async/await --- public/app/core/specs/backend_srv.jest.ts | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts index 6281f3814ce6f..2d62716622a70 100644 --- a/public/app/core/specs/backend_srv.jest.ts +++ b/public/app/core/specs/backend_srv.jest.ts @@ -3,10 +3,9 @@ jest.mock('app/core/store'); describe('backend_srv', function() { let _httpBackend = options => { - if (options.method === 'GET' && options.url === 'gateway-error') { + console.log(options); + if (options.url === 'gateway-error') { return Promise.reject({ status: 502 }); - } else if (options.method === 'POST') { - // return Promise.resolve({}); } return Promise.resolve({}); }; @@ -22,17 +21,14 @@ describe('backend_srv', function() { // }) // ); - describe('when handling errors', function() { - it('should return the http status code', function(done) { + describe('when handling errors', () => { + it('should return the http status code', async () => { // _httpBackend.whenGET('gateway-error').respond(502); - _backendSrv - .datasourceRequest({ - url: 'gateway-error', - }) - .catch(function(err) { - expect(err.status).toBe(502); - done(); - }); + let res = await _backendSrv.datasourceRequest({ + url: 'gateway-error', + }); + console.log(res); + expect(res.status).toBe(502); // _httpBackend.flush(); }); }); From 2db4a54f75c7c1bd8a3a70ea0d4be50f88ab0552 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 14:40:56 +0200 Subject: [PATCH 068/104] Fix test --- public/app/plugins/panel/singlestat/specs/singlestat.jest.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 
dd02b5c169c5a..0480d0be5c30c 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -87,7 +87,7 @@ describe('SingleStatCtrl', function() { }); it('should set formatted value', function() { - expect(moment(ctx.data.valueFormatted).isSame('2017-09-17 09:56:37')).toBe(true); + expect(moment(ctx.data.valueFormatted).valueOf()).toBe(1505634997000); }); }); From 766c23a1eb86d6ba47b2d61d9b72153089b73264 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 15:16:19 +0200 Subject: [PATCH 069/104] Fix emit errors --- public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index 3ebcf6cdf313c..a0c7dd0ab9ca2 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -34,6 +34,9 @@ describe('GraphCtrl', () => { beforeEach(() => { ctx.ctrl = new GraphCtrl(scope, injector, {}); + ctx.ctrl.events = { + emit: () => {}, + }; ctx.ctrl.annotationsPromise = Promise.resolve({}); ctx.ctrl.updateTimeRange(); }); From b28a362635876bc321063127f0e3ddf3d599cb79 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 21 Jul 2018 11:04:05 +0200 Subject: [PATCH 070/104] Use metric column as prefix If multiple value columns are returned and a metric column is returned aswell the metric column will be used as prefix for the series name --- docs/sources/features/datasources/postgres.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md index f9af60a2efc18..f3e52ed6652af 100644 --- a/docs/sources/features/datasources/postgres.md +++ b/docs/sources/features/datasources/postgres.md @@ -101,7 +101,7 @@ The resulting table panel: If you set `Format as` to 
`Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch. Any column except `time` and `metric` is treated as a value column. -You may return a column named `metric` that is used as metric name for the value column. +You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name. **Example with `metric` column:** From f9d6c88a556142791bc6ba0af96ca46dd0dac037 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 24 Jul 2018 18:31:47 +0200 Subject: [PATCH 071/104] add testcase for metric column as prefix --- pkg/tsdb/postgres/postgres_test.go | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index 089829bf5901f..c7787929a9d53 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -568,6 +568,31 @@ func TestPostgres(t *testing.T) { So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one") }) + Convey("When doing a metric query with metric column and multiple value columns", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT $__timeEpoch(time), measurement as metric, "valueOne", "valueTwo" FROM metric_values ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 4) + So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne") + So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo") + So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne") + 
So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo") + }) + Convey("When doing a metric query grouping by time should return correct series", func() { query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ From 7905c29875a29d230af476e41cb070b13bc9de73 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 24 Jul 2018 19:25:48 +0200 Subject: [PATCH 072/104] adjust metric prefix code to sql engine refactor --- pkg/tsdb/sql_engine.go | 15 ++++++++++++++- .../postgres/partials/query.editor.html | 5 ++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 27ed37923a36d..027f37fc2433c 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -229,6 +229,8 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, rowCount := 0 timeIndex := -1 metricIndex := -1 + metricPrefix := false + var metricPrefixValue string // check columns of resultset: a column named time is mandatory // the first text column is treated as metric name unless a column named metric is present @@ -256,6 +258,11 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, } } + // use metric column as prefix with multiple value columns + if metricIndex != -1 && len(columnNames) > 3 { + metricPrefix = true + } + if timeIndex == -1 { return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or ")) } @@ -301,7 +308,11 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, if metricIndex >= 0 { if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue + if metricPrefix { + metricPrefixValue = columnValue + } else { + metric = columnValue + } } else { return fmt.Errorf("Column metric must be of type %s. 
metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex]) } @@ -318,6 +329,8 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, if metricIndex == -1 { metric = col + } else if metricPrefix { + metric = metricPrefixValue + " " + col } series, exist := pointsBySeries[metric] diff --git a/public/app/plugins/datasource/postgres/partials/query.editor.html b/public/app/plugins/datasource/postgres/partials/query.editor.html index 26392c17356c2..b7c12471f5212 100644 --- a/public/app/plugins/datasource/postgres/partials/query.editor.html +++ b/public/app/plugins/datasource/postgres/partials/query.editor.html @@ -40,7 +40,10 @@
    Time series:
     - return column named time (UTC in seconds or timestamp)
     - return column(s) with numeric datatype as values
    -- (Optional: return column named metric to represent the series name. If no column named metric is found the column name of the value column is used as series name)
    +Optional: 
    +  - return column named metric to represent the series name. 
    +  - If multiple value columns are returned the metric column is used as prefix. 
    +  - If no column named metric is found the column name of the value column is used as series name
     
     Table:
     - return any set of columns
    
    From 2f6b302375bbe7c562e6df09760f1f4b495b2715 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 15:51:56 +0200
    Subject: [PATCH 073/104] Test passing. Remove Karma
    
    ---
     public/app/core/specs/backend_srv.jest.ts  | 23 +++++-----------
     public/app/core/specs/backend_srv_specs.ts | 31 ----------------------
     2 files changed, 7 insertions(+), 47 deletions(-)
     delete mode 100644 public/app/core/specs/backend_srv_specs.ts
    
    diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts
    index 2d62716622a70..c65464aa875bb 100644
    --- a/public/app/core/specs/backend_srv.jest.ts
    +++ b/public/app/core/specs/backend_srv.jest.ts
    @@ -12,24 +12,15 @@ describe('backend_srv', function() {
     
       let _backendSrv = new BackendSrv(_httpBackend, {}, {}, {}, {});
     
    -  //   beforeEach(angularMocks.module('grafana.core'));
    -  //   beforeEach(angularMocks.module('grafana.services'));
    -  //   beforeEach(
    -  //     angularMocks.inject(function($httpBackend, $http, backendSrv) {
    -  //       _httpBackend = $httpBackend;
    -  //       _backendSrv = backendSrv;
    -  //     })
    -  //   );
    -
       describe('when handling errors', () => {
         it('should return the http status code', async () => {
    -      //   _httpBackend.whenGET('gateway-error').respond(502);
    -      let res = await _backendSrv.datasourceRequest({
    -        url: 'gateway-error',
    -      });
    -      console.log(res);
    -      expect(res.status).toBe(502);
    -      //   _httpBackend.flush();
    +      try {
    +        await _backendSrv.datasourceRequest({
    +          url: 'gateway-error',
    +        });
    +      } catch (err) {
    +        expect(err.status).toBe(502);
    +      }
         });
       });
     });
    diff --git a/public/app/core/specs/backend_srv_specs.ts b/public/app/core/specs/backend_srv_specs.ts
    deleted file mode 100644
    index 74b058b98c82d..0000000000000
    --- a/public/app/core/specs/backend_srv_specs.ts
    +++ /dev/null
    @@ -1,31 +0,0 @@
    -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
    -import 'app/core/services/backend_srv';
    -
    -describe('backend_srv', function() {
    -  var _backendSrv;
    -  var _httpBackend;
    -
    -  beforeEach(angularMocks.module('grafana.core'));
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(
    -    angularMocks.inject(function($httpBackend, $http, backendSrv) {
    -      _httpBackend = $httpBackend;
    -      _backendSrv = backendSrv;
    -    })
    -  );
    -
    -  describe('when handling errors', function() {
    -    it('should return the http status code', function(done) {
    -      _httpBackend.whenGET('gateway-error').respond(502);
    -      _backendSrv
    -        .datasourceRequest({
    -          url: 'gateway-error',
    -        })
    -        .catch(function(err) {
    -          expect(err.status).to.be(502);
    -          done();
    -        });
    -      _httpBackend.flush();
    -    });
    -  });
    -});
    
    From c11d0f5cc6289b708d1e0d7c072de7eb6b1b8422 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 15:52:22 +0200
Subject: [PATCH 074/104] Remove log
    
    ---
     public/app/core/specs/backend_srv.jest.ts | 1 -
     1 file changed, 1 deletion(-)
    
    diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts
    index c65464aa875bb..b19bd1177665e 100644
    --- a/public/app/core/specs/backend_srv.jest.ts
    +++ b/public/app/core/specs/backend_srv.jest.ts
    @@ -3,7 +3,6 @@ jest.mock('app/core/store');
     
     describe('backend_srv', function() {
       let _httpBackend = options => {
    -    console.log(options);
         if (options.url === 'gateway-error') {
           return Promise.reject({ status: 502 });
         }
    
    From 895b4b40eee4af0ee79b0935856ff1c532ebeb94 Mon Sep 17 00:00:00 2001
    From: Worty <6840978+Worty@users.noreply.github.com>
    Date: Fri, 27 Jul 2018 16:26:04 +0200
    Subject: [PATCH 075/104] correct volume unit
    
    ---
     public/app/core/specs/kbn.jest.ts |  2 +-
     public/app/core/utils/kbn.ts      | 36 +++++++++++++++----------------
     2 files changed, 19 insertions(+), 19 deletions(-)
    
    diff --git a/public/app/core/specs/kbn.jest.ts b/public/app/core/specs/kbn.jest.ts
    index 6894506804314..9c62990615c0a 100644
    --- a/public/app/core/specs/kbn.jest.ts
    +++ b/public/app/core/specs/kbn.jest.ts
    @@ -402,7 +402,7 @@ describe('duration', function() {
     describe('volume', function() {
       it('1000m3', function() {
         var str = kbn.valueFormats['m3'](1000, 1, null);
    -    expect(str).toBe('1000.0 m3');
    +    expect(str).toBe('1000.0 m³');
       });
     });
     
    diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
    index 4fc4829811f19..74ef2a9e87405 100644
    --- a/public/app/core/utils/kbn.ts
    +++ b/public/app/core/utils/kbn.ts
    @@ -572,9 +572,9 @@ kbn.valueFormats.accG = kbn.formatBuilders.fixedUnit('g');
     // Volume
     kbn.valueFormats.litre = kbn.formatBuilders.decimalSIPrefix('L');
     kbn.valueFormats.mlitre = kbn.formatBuilders.decimalSIPrefix('L', -1);
    -kbn.valueFormats.m3 = kbn.formatBuilders.fixedUnit('m3');
    -kbn.valueFormats.Nm3 = kbn.formatBuilders.fixedUnit('Nm3');
    -kbn.valueFormats.dm3 = kbn.formatBuilders.fixedUnit('dm3');
    +kbn.valueFormats.m3 = kbn.formatBuilders.fixedUnit('m³');
    +kbn.valueFormats.Nm3 = kbn.formatBuilders.fixedUnit('Nm³');
    +kbn.valueFormats.dm3 = kbn.formatBuilders.fixedUnit('dm³');
     kbn.valueFormats.gallons = kbn.formatBuilders.fixedUnit('gal');
     
     // Flow
    @@ -605,14 +605,14 @@ kbn.valueFormats.radsvh = kbn.formatBuilders.decimalSIPrefix('Sv/h');
     // Concentration
     kbn.valueFormats.ppm = kbn.formatBuilders.fixedUnit('ppm');
     kbn.valueFormats.conppb = kbn.formatBuilders.fixedUnit('ppb');
    -kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m3');
    -kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm3');
    -kbn.valueFormats.conμgm3 = kbn.formatBuilders.fixedUnit('μg/m3');
    -kbn.valueFormats.conμgNm3 = kbn.formatBuilders.fixedUnit('μg/Nm3');
    -kbn.valueFormats.conmgm3 = kbn.formatBuilders.fixedUnit('mg/m3');
    -kbn.valueFormats.conmgNm3 = kbn.formatBuilders.fixedUnit('mg/Nm3');
    -kbn.valueFormats.congm3 = kbn.formatBuilders.fixedUnit('g/m3');
    -kbn.valueFormats.congNm3 = kbn.formatBuilders.fixedUnit('g/Nm3');
    +kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m³');
    +kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm³');
    +kbn.valueFormats.conμgm3 = kbn.formatBuilders.fixedUnit('μg/m³');
    +kbn.valueFormats.conμgNm3 = kbn.formatBuilders.fixedUnit('μg/Nm³');
    +kbn.valueFormats.conmgm3 = kbn.formatBuilders.fixedUnit('mg/m³');
    +kbn.valueFormats.conmgNm3 = kbn.formatBuilders.fixedUnit('mg/Nm³');
    +kbn.valueFormats.congm3 = kbn.formatBuilders.fixedUnit('g/m³');
    +kbn.valueFormats.congNm3 = kbn.formatBuilders.fixedUnit('g/Nm³');
     
     // Time
     kbn.valueFormats.hertz = kbn.formatBuilders.decimalSIPrefix('Hz');
    @@ -1119,13 +1119,13 @@ kbn.getUnitFormats = function() {
             { text: 'parts-per-million (ppm)', value: 'ppm' },
             { text: 'parts-per-billion (ppb)', value: 'conppb' },
             { text: 'nanogram per cubic metre (ng/m3)', value: 'conngm3' },
    -        { text: 'nanogram per normal cubic metre (ng/Nm3)', value: 'conngNm3' },
    -        { text: 'microgram per cubic metre (μg/m3)', value: 'conμgm3' },
    -        { text: 'microgram per normal cubic metre (μg/Nm3)', value: 'conμgNm3' },
    -        { text: 'milligram per cubic metre (mg/m3)', value: 'conmgm3' },
    -        { text: 'milligram per normal cubic metre (mg/Nm3)', value: 'conmgNm3' },
    -        { text: 'gram per cubic metre (g/m3)', value: 'congm3' },
    -        { text: 'gram per normal cubic metre (g/Nm3)', value: 'congNm3' },
    +        { text: 'nanogram per normal cubic metre (ng/Nm³)', value: 'conngNm3' },
    +        { text: 'microgram per cubic metre (μg/m³)', value: 'conμgm3' },
    +        { text: 'microgram per normal cubic metre (μg/Nm³)', value: 'conμgNm3' },
    +        { text: 'milligram per cubic metre (mg/m³)', value: 'conmgm3' },
    +        { text: 'milligram per normal cubic metre (mg/Nm³)', value: 'conmgNm3' },
    +        { text: 'gram per cubic metre (g/m³)', value: 'congm3' },
    +        { text: 'gram per normal cubic metre (g/Nm³)', value: 'congNm3' },
           ],
         },
       ];
    
    From 26f709e87ea5d551b46f3b15909165aee732e298 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 16:45:03 +0200
Subject: [PATCH 076/104] Karma to Jest
    
    ---
     ...map_ctrl_specs.ts => heatmap_ctrl.jest.ts} | 44 ++++++++++---------
     1 file changed, 24 insertions(+), 20 deletions(-)
     rename public/app/plugins/panel/heatmap/specs/{heatmap_ctrl_specs.ts => heatmap_ctrl.jest.ts} (61%)
    
    diff --git a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    similarity index 61%
    rename from public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts
    rename to public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    index 98055ccf52ddd..70449763856cd 100644
    --- a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts
    +++ b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    @@ -1,26 +1,30 @@
    -import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common';
    -
     import moment from 'moment';
     import { HeatmapCtrl } from '../heatmap_ctrl';
    -import helpers from '../../../../../test/specs/helpers';
     
     describe('HeatmapCtrl', function() {
    -  var ctx = new helpers.ControllerTestContext();
    +  let ctx = {};
     
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(angularMocks.module('grafana.controllers'));
    -  beforeEach(
    -    angularMocks.module(function($compileProvider) {
    -      $compileProvider.preAssignBindingsEnabled(true);
    -    })
    -  );
    +  let $injector = {
    +      get: () => {}
    +  };
     
    -  beforeEach(ctx.providePhase());
    -  beforeEach(ctx.createPanelController(HeatmapCtrl));
    -  beforeEach(() => {
    -    ctx.ctrl.annotationsPromise = Promise.resolve({});
    -    ctx.ctrl.updateTimeRange();
    -  });
    +  let $scope = {
    +    $on: () => {},
    +    events: {
    +        on: () => {}
    +    }
    +  };
    +
    +HeatmapCtrl.prototype.panel = {
    +    events: {
    +        on: () => {},
    +        emit: () => {}
    +    }
    +};
    +
    +    beforeEach(() => {
    +        ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
    +    });
     
       describe('when time series are outside range', function() {
         beforeEach(function() {
    @@ -36,7 +40,7 @@ describe('HeatmapCtrl', function() {
         });
     
         it('should set datapointsOutside', function() {
    -      expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range');
    +      expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range');
         });
       });
     
    @@ -61,7 +65,7 @@ describe('HeatmapCtrl', function() {
         });
     
         it('should set datapointsOutside', function() {
    -      expect(ctx.ctrl.dataWarning).to.be(null);
    +      expect(ctx.ctrl.dataWarning).toBe(null);
         });
       });
     
    @@ -72,7 +76,7 @@ describe('HeatmapCtrl', function() {
         });
     
         it('should set datapointsCount warning', function() {
    -      expect(ctx.ctrl.dataWarning.title).to.be('No data points');
    +      expect(ctx.ctrl.dataWarning.title).toBe('No data points');
         });
       });
     });
    
    From 805dc3542f780c57f477c61cf9cf475515aa3760 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 16:46:41 +0200
    Subject: [PATCH 077/104] Remove extra mock
    
    ---
     .../panel/heatmap/specs/heatmap_ctrl.jest.ts  | 21 ++++++++-----------
     1 file changed, 9 insertions(+), 12 deletions(-)
    
    diff --git a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    index 70449763856cd..800c2518f9a9e 100644
    --- a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    +++ b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    @@ -5,26 +5,23 @@ describe('HeatmapCtrl', function() {
       let ctx = {};
     
       let $injector = {
    -      get: () => {}
    +    get: () => {},
       };
     
       let $scope = {
         $on: () => {},
    -    events: {
    -        on: () => {}
    -    }
       };
     
    -HeatmapCtrl.prototype.panel = {
    +  HeatmapCtrl.prototype.panel = {
         events: {
    -        on: () => {},
    -        emit: () => {}
    -    }
    -};
    +      on: () => {},
    +      emit: () => {},
    +    },
    +  };
     
    -    beforeEach(() => {
    -        ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
    -    });
    +  beforeEach(() => {
    +    ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
    +  });
     
       describe('when time series are outside range', function() {
         beforeEach(function() {
    
    From bc9b6ddefe9c982b778d699c7c445db081982fbd Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 17:14:27 +0200
    Subject: [PATCH 078/104] document metric column prefix for mysql and mssql
    
    ---
     docs/sources/features/datasources/mssql.md | 2 +-
     docs/sources/features/datasources/mysql.md | 2 +-
     2 files changed, 2 insertions(+), 2 deletions(-)
    
    diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
    index d4d5cc6d73ecc..bcb965dda74dc 100644
    --- a/docs/sources/features/datasources/mssql.md
    +++ b/docs/sources/features/datasources/mssql.md
    @@ -148,7 +148,7 @@ The resulting table panel:
     
     ## Time series queries
     
    -If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, tha name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric.
+If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
     
     **Example database table:**
     
    diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
    index ce50053c7eab9..c6e620eb08b2e 100644
    --- a/docs/sources/features/datasources/mysql.md
    +++ b/docs/sources/features/datasources/mysql.md
    @@ -103,7 +103,7 @@ The resulting table panel:
     
     If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
     Any column except `time` and `metric` is treated as a value column.
    -You may return a column named `metric` that is used as metric name for the value column.
    +You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
     
     **Example with `metric` column:**
     
    
    From 036647ae35b9e6799d5af9b984a47a5907c40d6a Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 17:18:45 +0200
    Subject: [PATCH 079/104] document metric column prefix in query editor
    
    ---
     .../app/plugins/datasource/mssql/partials/query.editor.html | 6 ++++--
     .../app/plugins/datasource/mysql/partials/query.editor.html | 5 ++++-
     2 files changed, 8 insertions(+), 3 deletions(-)
    
    diff --git a/public/app/plugins/datasource/mssql/partials/query.editor.html b/public/app/plugins/datasource/mssql/partials/query.editor.html
    index ddc24475d6079..397a35164c08c 100644
    --- a/public/app/plugins/datasource/mssql/partials/query.editor.html
    +++ b/public/app/plugins/datasource/mssql/partials/query.editor.html
    @@ -39,9 +39,11 @@
     	
    Time series:
     - return column named time (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
    -- optional: return column named metric to represent the series names.
     - any other columns returned will be the time point values.
    -- if multiple value columns are present and a metric column is provided. the series name will be the combination of "MetricName - ValueColumnName".
    +Optional:
    +  - return column named metric to represent the series name.
    +  - If multiple value columns are returned the metric column is used as prefix.
    +  - If no column named metric is found the column name of the value column is used as series name
     
     Table:
     - return any set of columns
    diff --git a/public/app/plugins/datasource/mysql/partials/query.editor.html b/public/app/plugins/datasource/mysql/partials/query.editor.html
    index df68982fcfa1b..d4be22fc3e90b 100644
    --- a/public/app/plugins/datasource/mysql/partials/query.editor.html
    +++ b/public/app/plugins/datasource/mysql/partials/query.editor.html
    @@ -40,7 +40,10 @@
     		
    Time series:
     - return column named time or time_sec (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
     - return column(s) with numeric datatype as values
    -- (Optional: return column named metric to represent the series name. If no column named metric is found the column name of the value column is used as series name)
    +Optional:
    +  - return column named metric to represent the series name.
    +  - If multiple value columns are returned the metric column is used as prefix.
    +  - If no column named metric is found the column name of the value column is used as series name
     
     Table:
     - return any set of columns
    
    From e487fabcd56f5a04b8fa5a6cba6a020855f2d062 Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 17:54:51 +0200
    Subject: [PATCH 080/104] add metric column prefix test for mysql
    
    ---
     pkg/tsdb/mysql/mysql_test.go | 25 +++++++++++++++++++++++++
     1 file changed, 25 insertions(+)
    
    diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go
    index 3b4e283b726bf..9947c23498bde 100644
    --- a/pkg/tsdb/mysql/mysql_test.go
    +++ b/pkg/tsdb/mysql/mysql_test.go
    @@ -634,6 +634,31 @@ func TestMySQL(t *testing.T) {
     				So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
     			})
     
    +			Convey("When doing a metric query with metric column and multiple value columns", func() {
    +				query := &tsdb.TsdbQuery{
    +					Queries: []*tsdb.Query{
    +						{
    +							Model: simplejson.NewFromAny(map[string]interface{}{
    +								"rawSql": `SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values ORDER BY 1,2`,
    +								"format": "time_series",
    +							}),
    +							RefId: "A",
    +						},
    +					},
    +				}
    +
    +				resp, err := endpoint.Query(nil, nil, query)
    +				So(err, ShouldBeNil)
    +				queryResult := resp.Results["A"]
    +				So(queryResult.Error, ShouldBeNil)
    +
    +				So(len(queryResult.Series), ShouldEqual, 4)
    +				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
    +			})
    +
     			Convey("When doing a metric query grouping by time should return correct series", func() {
     				query := &tsdb.TsdbQuery{
     					Queries: []*tsdb.Query{
    
    From 3aa4790979cf457a26754afd67f5235fc3345f62 Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 18:13:19 +0200
    Subject: [PATCH 081/104] add tests for metric column prefix to mssql
    
    ---
     pkg/tsdb/mssql/mssql_test.go | 25 +++++++++++++++++++++++++
     1 file changed, 25 insertions(+)
    
    diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
    index 86484cb9d5e4d..8e3d617ca09b2 100644
    --- a/pkg/tsdb/mssql/mssql_test.go
    +++ b/pkg/tsdb/mssql/mssql_test.go
    @@ -610,6 +610,31 @@ func TestMSSQL(t *testing.T) {
     				So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
     			})
     
    +			Convey("When doing a metric query with metric column and multiple value columns", func() {
    +				query := &tsdb.TsdbQuery{
    +					Queries: []*tsdb.Query{
    +						{
    +							Model: simplejson.NewFromAny(map[string]interface{}{
    +								"rawSql": "SELECT $__timeEpoch(time), measurement AS metric, valueOne, valueTwo FROM metric_values ORDER BY 1",
    +								"format": "time_series",
    +							}),
    +							RefId: "A",
    +						},
    +					},
    +				}
    +
    +				resp, err := endpoint.Query(nil, nil, query)
    +				So(err, ShouldBeNil)
    +				queryResult := resp.Results["A"]
    +				So(queryResult.Error, ShouldBeNil)
    +
    +				So(len(queryResult.Series), ShouldEqual, 4)
    +				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
    +			})
    +
     			Convey("Given a stored procedure that takes @from and @to in epoch time", func() {
     				sql := `
     						IF object_id('sp_test_epoch') IS NOT NULL
    
    From e37e8cb38c649796db57a39868d4c3c79ddab9fd Mon Sep 17 00:00:00 2001
    From: Jan Garaj 
    Date: Mon, 30 Jul 2018 08:02:16 +0100
    Subject: [PATCH 082/104] Add missing tls_skip_verify_insecure (#12748)
    
    ---
     conf/defaults.ini | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/conf/defaults.ini b/conf/defaults.ini
    index 5faba3ea7bd48..6c27886c649fe 100644
    --- a/conf/defaults.ini
    +++ b/conf/defaults.ini
    @@ -311,6 +311,7 @@ token_url =
     api_url =
     team_ids =
     allowed_organizations =
    +tls_skip_verify_insecure = false
     
     #################################### Basic Auth ##########################
     [auth.basic]
    
    From 13a7b638bcc90ff6abcf00a388d8dfbedf01a8b2 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 10:19:51 +0200
    Subject: [PATCH 083/104] changelog: add notes about closing #12747
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index ad1b63234e9f3..4a2c3c7a0af95 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -24,6 +24,7 @@
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
    +* **Auth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
     
     # 5.2.2 (2018-07-25)
     
    
    From e4983cba2fc17de8523814b7126e5c2d858ac569 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 10:21:22 +0200
    Subject: [PATCH 084/104] changelog: update
    
    [skip ci]
    ---
     CHANGELOG.md | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index 4a2c3c7a0af95..b8f5bced97271 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -24,7 +24,7 @@
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
    -* **Auth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    +* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
     
     # 5.2.2 (2018-07-25)
     
    
    From 3d4a346c6621c6e685d338dc95aed0221c84c541 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Mon, 30 Jul 2018 13:02:08 +0200
    Subject: [PATCH 085/104] Begin conversion
    
    ---
     .../prometheus/specs/_datasource.jest.ts      | 792 ++++++++++++++++++
     1 file changed, 792 insertions(+)
     create mode 100644 public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    new file mode 100644
    index 0000000000000..384abc8f90274
    --- /dev/null
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -0,0 +1,792 @@
    +import moment from 'moment';
    +import { PrometheusDatasource } from '../datasource';
    +import $q from 'q';
    +
    +const SECOND = 1000;
    +const MINUTE = 60 * SECOND;
    +const HOUR = 60 * MINUTE;
    +
    +const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    +
    +let ctx = {};
    +let instanceSettings = {
    +  url: 'proxied',
    +  directUrl: 'direct',
    +  user: 'test',
    +  password: 'mupp',
    +  jsonData: { httpMethod: 'GET' },
    +};
    +let backendSrv = {
    +  datasourceRequest: jest.fn(),
    +};
    +
    +let templateSrv = {
    +  replace: (target, scopedVars, format) => {
    +    if (!target) {
    +      return target;
    +    }
    +    let variable, value, fmt;
    +
    +    return target.replace(scopedVars, (match, var1, var2, fmt2, var3, fmt3) => {
    +      variable = this.index[var1 || var2 || var3];
    +      fmt = fmt2 || fmt3 || format;
    +      if (scopedVars) {
    +        value = scopedVars[var1 || var2 || var3];
    +        if (value) {
    +          return this.formatValue(value.value, fmt, variable);
    +        }
    +      }
    +    });
    +  },
    +};
    +
    +let timeSrv = {
    +  timeRange: () => {
    +    return { to: { diff: () => 2000 }, from: '' };
    +  },
    +};
    +
    +describe('PrometheusDatasource', function() {
    +  //   beforeEach(angularMocks.module('grafana.core'));
    +  //   beforeEach(angularMocks.module('grafana.services'));
    +  //   beforeEach(ctx.providePhase(['timeSrv']));
    +
    +  //   beforeEach(
    +  //     angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    +  //       ctx.$q = $q;
    +  //       ctx.$httpBackend = $httpBackend;
    +  //       ctx.$rootScope = $rootScope;
    +  //       ctx.ds = $injector.instantiate(PrometheusDatasource, {
    +  //         instanceSettings: instanceSettings,
    +  //       });
    +  //       $httpBackend.when('GET', /\.html$/).respond('');
    +  //     })
    +  //   );
    +
    +  beforeEach(() => {
    +    ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +  });
    +  describe('When querying prometheus with one target using query editor target spec', function() {
    +    var results;
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    // Interval alignment with step
    +    var urlExpected =
    +      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    +    var response = {
    +      data: {
    +        status: 'success',
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              values: [[60, '3846']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      //   ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +      //   ctx.$httpBackend.flush();
    +    });
    +    it('should generate the correct query', function() {
    +      //   ctx.$httpBackend.verifyNoOutstandingExpectation();
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When querying prometheus with one target which return multiple series', function() {
    +    var results;
    +    var start = 60;
    +    var end = 360;
    +    var step = 60;
    +    // var urlExpected =
    +    //   'proxied/api/v1/query_range?query=' +
    +    //   encodeURIComponent('test{job="testjob"}') +
    +    //   '&start=' +
    +    //   start +
    +    //   '&end=' +
    +    //   end +
    +    //   '&step=' +
    +    //   step;
    +    var query = {
    +      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    +              values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    +            },
    +            {
    +              metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    +              values: [[start + step * 2, '4846']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should be same length', function() {
    +      expect(results.data.length).toBe(2);
    +      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
    +      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
    +    });
    +    it('should fill null until first datapoint in response', function() {
    +      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
    +      expect(results.data[0].datapoints[0][0]).toBe(null);
    +      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[0].datapoints[1][0]).toBe(3846);
    +    });
    +    it('should fill null after last datapoint in response', function() {
    +      var length = (end - start) / step + 1;
    +      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
    +      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
    +      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
    +      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
    +    });
    +    it('should fill null at gap between series', function() {
    +      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
    +      expect(results.data[0].datapoints[2][0]).toBe(null);
    +      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[1].datapoints[1][0]).toBe(null);
    +      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
    +      expect(results.data[1].datapoints[3][0]).toBe(null);
    +    });
    +  });
    +  describe('When querying prometheus with one target and instant = true', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'vector',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              value: [123, '3846'],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When performing annotationQuery', function() {
    +    var results;
    +    // var urlExpected =
    +    //   'proxied/api/v1/query_range?query=' +
    +    //   encodeURIComponent('ALERTS{alertstate="firing"}') +
    +    //   '&start=60&end=180&step=60';
    +    var options = {
    +      annotation: {
    +        expr: 'ALERTS{alertstate="firing"}',
    +        tagKeys: 'job',
    +        titleFormat: '{{alertname}}',
    +        textFormat: '{{instance}}',
    +      },
    +      range: {
    +        from: time({ seconds: 63 }),
    +        to: time({ seconds: 123 }),
    +      },
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: {
    +                __name__: 'ALERTS',
    +                alertname: 'InstanceDown',
    +                alertstate: 'firing',
    +                instance: 'testinstance',
    +                job: 'testjob',
    +              },
    +              values: [[123, '1']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.annotationQuery(options).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should return annotation list', function() {
    +      //   ctx.$rootScope.$apply();
    +      expect(results.length).toBe(1);
    +      expect(results[0].tags).toContain('testjob');
    +      expect(results[0].title).toBe('InstanceDown');
    +      expect(results[0].text).toBe('testinstance');
    +      expect(results[0].time).toBe(123 * 1000);
    +    });
    +  });
    +
    +  describe('When resultFormat is table and instant = true', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'vector',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              value: [123, '3846'],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should return result', () => {
    +      expect(results).not.toBe(null);
    +    });
    +  });
    +
    +  describe('The "step" query parameter', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be min interval when greater than auto interval', async () => {
    +      let query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('step should never go below 1', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '100ms',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('should be auto interval when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should result in querying fewer than 11000 data points', async () => {
    +      var query = {
    +        // 6 hour range
    +        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '1s',
    +      };
    +      var end = 7 * 60 * 60;
    +      var start = 60 * 60;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not apply min interval when interval * intervalFactor greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      // times get rounded up to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply min interval when interval * intervalFactor smaller', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply intervalFactor to auto interval when greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      // times get aligned to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
     +    it('should not be affected by the 11000 data points limit when large enough', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should be determined by the 11000 data points limit when too small', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +  });
    +
    +  describe('The __interval and __interval_ms template variables', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be unchanged when auto interval is greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('10s');
    +      expect(query.scopedVars.__interval.value).toBe('10s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +    });
    +    it('should be min interval when it is greater than auto interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +    it('should account for intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('10s');
    +      expect(query.scopedVars.__interval.value).toBe('10s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +    });
    +    it('should be interval * intervalFactor when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +    it('should be min interval when greater than interval * intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[60s])') +
    +        '&start=' +
    +        start +
    +        '&end=' +
    +        end +
    +        '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +  });
    +});
    +
    +describe('PrometheusDatasource for POST', function() {
    +  //   var ctx = new helpers.ServiceTestContext();
    +  let instanceSettings = {
    +    url: 'proxied',
    +    directUrl: 'direct',
    +    user: 'test',
    +    password: 'mupp',
    +    jsonData: { httpMethod: 'POST' },
    +  };
    +
    +  //   beforeEach(angularMocks.module('grafana.core'));
    +  //   beforeEach(angularMocks.module('grafana.services'));
    +  //   beforeEach(ctx.providePhase(['timeSrv']));
    +
    +  //   beforeEach(
    +  //     // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    +  //     //   ctx.$q = $q;
    +  //     //   ctx.$httpBackend = $httpBackend;
    +  //     //   ctx.$rootScope = $rootScope;
    +  //     //   ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
    +  //     //   $httpBackend.when('GET', /\.html$/).respond('');
    +  //     // })
    +  //   );
    +
    +  describe('When querying prometheus with one target using query editor target spec', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query_range';
    +    var dataExpected = {
    +      query: 'test{job="testjob"}',
    +      start: 1 * 60,
    +      end: 3 * 60,
    +      step: 60,
    +    };
    +    var query = {
    +      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              values: [[2 * 60, '3846']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('POST');
    +      expect(res.url).toBe(urlExpected);
    +      expect(res.data).toEqual(dataExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +});
    
    From e32cf75c2d3caca0d62e3296701d63c9135e2233 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 13:50:18 +0200
    Subject: [PATCH 086/104] fix usage of metric column types so that you don't
     need to specify metric alias
    
    ---
     pkg/tsdb/sql_engine.go | 5 +++++
     1 file changed, 5 insertions(+)
    
    diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go
    index 027f37fc2433c..29428971c6410 100644
    --- a/pkg/tsdb/sql_engine.go
    +++ b/pkg/tsdb/sql_engine.go
    @@ -75,6 +75,10 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo
     		queryEndpoint.timeColumnNames = config.TimeColumnNames
     	}
     
    +	if len(config.MetricColumnTypes) > 0 {
    +		queryEndpoint.metricColumnTypes = config.MetricColumnTypes
    +	}
    +
     	engineCache.Lock()
     	defer engineCache.Unlock()
     
    @@ -249,6 +253,7 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
     				columnType := columnTypes[i].DatabaseTypeName()
     
     				for _, mct := range e.metricColumnTypes {
    +					e.log.Info(mct)
     					if columnType == mct {
     						metricIndex = i
     						continue
    
    From 38a52c2489853eaff1ce036b864f736c59c9ba49 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 13:50:52 +0200
    Subject: [PATCH 087/104] mssql: update tests
    
    ---
     pkg/tsdb/mssql/mssql_test.go | 54 ++++++++++--------------------------
     1 file changed, 15 insertions(+), 39 deletions(-)
    
    diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
    index 8e3d617ca09b2..30d1da3bda191 100644
    --- a/pkg/tsdb/mssql/mssql_test.go
    +++ b/pkg/tsdb/mssql/mssql_test.go
    @@ -615,7 +615,7 @@ func TestMSSQL(t *testing.T) {
     					Queries: []*tsdb.Query{
     						{
     							Model: simplejson.NewFromAny(map[string]interface{}{
    -								"rawSql": "SELECT $__timeEpoch(time), measurement AS metric, valueOne, valueTwo FROM metric_values ORDER BY 1",
    +								"rawSql": "SELECT $__timeEpoch(time), measurement, valueOne, valueTwo FROM metric_values ORDER BY 1",
     								"format": "time_series",
     							}),
     							RefId: "A",
    @@ -660,21 +660,9 @@ func TestMSSQL(t *testing.T) {
     
     							SELECT
     								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value one' as metric,
    -								avg(valueOne) as value
    -							FROM
    -								metric_values
    -							WHERE
    -								time BETWEEN DATEADD(s, @from, '1970-01-01') AND DATEADD(s, @to, '1970-01-01') AND
    -								(@metric = 'ALL' OR measurement = @metric)
    -							GROUP BY
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
    -								measurement
    -							UNION ALL
    -							SELECT
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value two' as metric,
    -								avg(valueTwo) as value
    +								measurement as metric,
    +								avg(valueOne) as valueOne,
    +								avg(valueTwo) as valueTwo
     							FROM
     								metric_values
     							WHERE
    @@ -717,10 +705,10 @@ func TestMSSQL(t *testing.T) {
     					So(queryResult.Error, ShouldBeNil)
     
     					So(len(queryResult.Series), ShouldEqual, 4)
    -					So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
    -					So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
    -					So(queryResult.Series[2].Name, ShouldEqual, "Metric A - value two")
    -					So(queryResult.Series[3].Name, ShouldEqual, "Metric B - value two")
    +					So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +					So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +					So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +					So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
     				})
     			})
     
    @@ -749,21 +737,9 @@ func TestMSSQL(t *testing.T) {
     
     							SELECT
     								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value one' as metric,
    -								avg(valueOne) as value
    -							FROM
    -								metric_values
    -							WHERE
    -								time BETWEEN @from AND @to AND
    -								(@metric = 'ALL' OR measurement = @metric)
    -							GROUP BY
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
    -								measurement
    -							UNION ALL
    -							SELECT
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value two' as metric,
    -								avg(valueTwo) as value
    +								measurement as metric,
    +								avg(valueOne) as valueOne,
    +								avg(valueTwo) as valueTwo
     							FROM
     								metric_values
     							WHERE
    @@ -806,10 +782,10 @@ func TestMSSQL(t *testing.T) {
     					So(queryResult.Error, ShouldBeNil)
     
     					So(len(queryResult.Series), ShouldEqual, 4)
    -					So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
    -					So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
    -					So(queryResult.Series[2].Name, ShouldEqual, "Metric A - value two")
    -					So(queryResult.Series[3].Name, ShouldEqual, "Metric B - value two")
    +					So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +					So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +					So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +					So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
     				})
     			})
     		})
    
    From 917b6b11b0fbae37d80a5dd097de031327e98679 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 13:54:57 +0200
    Subject: [PATCH 088/104] devenv: update sql dashboards
    
    ---
     .../datasource_tests_mssql_unittest.json      | 73 ++++---------------
     .../datasource_tests_mysql_unittest.json      | 73 ++++---------------
     .../datasource_tests_postgres_unittest.json   | 73 ++++---------------
     3 files changed, 42 insertions(+), 177 deletions(-)
    
    diff --git a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json
    index d47cfb0ad6ea3..0c7cc0fcc6515 100644
    --- a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json
    +++ b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json
    @@ -64,7 +64,7 @@
       "editable": true,
       "gnetId": null,
       "graphTooltip": 0,
    -  "iteration": 1532618879985,
    +  "iteration": 1532949769359,
       "links": [],
       "panels": [
         {
    @@ -871,14 +871,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement + ' - value one' as metric, \n  avg(valueOne) as valueOne\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
    +          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement + ' - value two' as metric, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values\nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1067,14 +1061,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1245,14 +1233,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1423,14 +1405,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1773,14 +1749,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1954,14 +1924,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2135,14 +2099,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2316,14 +2274,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2460,7 +2412,10 @@
       "refresh": false,
       "schemaVersion": 16,
       "style": "dark",
    -  "tags": ["gdev", "mssql"],
    +  "tags": [
    +    "gdev",
    +    "mssql"
    +  ],
       "templating": {
         "list": [
           {
    @@ -2587,5 +2542,5 @@
       "timezone": "",
       "title": "Datasource tests - MSSQL (unit test)",
       "uid": "GlAqcPgmz",
    -  "version": 58
    +  "version": 3
     }
    \ No newline at end of file
    diff --git a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json
    index 326114ec8ff6d..e95eedf254c07 100644
    --- a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json
    +++ b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json
    @@ -64,7 +64,7 @@
       "editable": true,
       "gnetId": null,
       "graphTooltip": 0,
    -  "iteration": 1532620354037,
    +  "iteration": 1532949531280,
       "links": [],
       "panels": [
         {
    @@ -871,14 +871,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  CONCAT(measurement, ' - value one') as metric, \n  avg(valueOne) as valueOne\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
    +          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  CONCAT(measurement, ' - value two') as metric, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values\nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1,2\nORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1061,14 +1055,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1239,14 +1227,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1417,14 +1399,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1593,14 +1569,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1774,14 +1744,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1955,14 +1919,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2136,14 +2094,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2280,7 +2232,10 @@
       "refresh": false,
       "schemaVersion": 16,
       "style": "dark",
    -  "tags": ["gdev", "mysql"],
    +  "tags": [
    +    "gdev",
    +    "mysql"
    +  ],
       "templating": {
         "list": [
           {
    @@ -2405,5 +2360,5 @@
       "timezone": "",
       "title": "Datasource tests - MySQL (unittest)",
       "uid": "Hmf8FDkmz",
    -  "version": 12
    +  "version": 1
     }
    \ No newline at end of file
    diff --git a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json
    index 85151089b7f3d..2243baed0aa83 100644
    --- a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json
    +++ b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json
    @@ -64,7 +64,7 @@
       "editable": true,
       "gnetId": null,
       "graphTooltip": 0,
    -  "iteration": 1532619575136,
    +  "iteration": 1532951521836,
       "links": [],
       "panels": [
         {
    @@ -871,14 +871,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement || ' - value one' as metric, \n  avg(\"valueOne\") as \"valueOne\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
    +          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement, \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement || ' - value two' as metric, \n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1049,14 +1043,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1227,14 +1215,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1405,14 +1387,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1581,14 +1557,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1762,14 +1732,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1943,14 +1907,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2124,14 +2082,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2268,7 +2220,10 @@
       "refresh": false,
       "schemaVersion": 16,
       "style": "dark",
    -  "tags": ["gdev", "postgres"],
    +  "tags": [
    +    "gdev",
    +    "postgres"
    +  ],
       "templating": {
         "list": [
           {
    @@ -2397,5 +2352,5 @@
       "timezone": "",
       "title": "Datasource tests - Postgres (unittest)",
       "uid": "vHQdlVziz",
    -  "version": 17
    +  "version": 1
     }
    \ No newline at end of file
    
    From 8a22129177a8f3656cd55b411245d516a16c4c87 Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Mon, 30 Jul 2018 14:37:23 +0200
    Subject: [PATCH 089/104] add version note to metric prefix and fix typo
    
    ---
     docs/sources/features/datasources/mssql.md    | 3 ++-
     docs/sources/features/datasources/mysql.md    | 3 ++-
     docs/sources/features/datasources/postgres.md | 3 ++-
     3 files changed, 6 insertions(+), 3 deletions(-)
    
    diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
    index bcb965dda74dc..ea7be8e1c30e3 100644
    --- a/docs/sources/features/datasources/mssql.md
    +++ b/docs/sources/features/datasources/mssql.md
    @@ -148,7 +148,8 @@ The resulting table panel:
     
     ## Time series queries
     
    -If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, tha name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
     +If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric.
    +If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
     
     **Example database table:**
     
    diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
    index c6e620eb08b2e..22287b2a83829 100644
    --- a/docs/sources/features/datasources/mysql.md
    +++ b/docs/sources/features/datasources/mysql.md
    @@ -103,7 +103,8 @@ The resulting table panel:
     
     If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
     Any column except `time` and `metric` is treated as a value column.
    -You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
    +You may return a column named `metric` that is used as metric name for the value column.
    +If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
     
     **Example with `metric` column:**
     
    diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md
    index f3e52ed6652af..793b3b6f4c07f 100644
    --- a/docs/sources/features/datasources/postgres.md
    +++ b/docs/sources/features/datasources/postgres.md
    @@ -101,7 +101,8 @@ The resulting table panel:
     
     If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
     Any column except `time` and `metric` is treated as a value column.
    -You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
    +You may return a column named `metric` that is used as metric name for the value column.
    +If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
     
     **Example with `metric` column:**
     
    
    From 9c0fbe5a0b3c2e334cff6d6bbe2cb4d5ae48a5fd Mon Sep 17 00:00:00 2001
    From: Worty <6840978+Worty@users.noreply.github.com>
    Date: Mon, 30 Jul 2018 16:19:31 +0200
    Subject: [PATCH 090/104] fixed that missing one
    
    ---
     public/app/core/utils/kbn.ts | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
    index 74ef2a9e87405..7bf2cdc5fd67c 100644
    --- a/public/app/core/utils/kbn.ts
    +++ b/public/app/core/utils/kbn.ts
    @@ -1118,7 +1118,7 @@ kbn.getUnitFormats = function() {
           submenu: [
             { text: 'parts-per-million (ppm)', value: 'ppm' },
             { text: 'parts-per-billion (ppb)', value: 'conppb' },
    -        { text: 'nanogram per cubic metre (ng/m3)', value: 'conngm3' },
    +        { text: 'nanogram per cubic metre (ng/m³)', value: 'conngm3' },
             { text: 'nanogram per normal cubic metre (ng/Nm³)', value: 'conngNm3' },
             { text: 'microgram per cubic metre (μg/m³)', value: 'conμgm3' },
             { text: 'microgram per normal cubic metre (μg/Nm³)', value: 'conμgNm3' },
    
    From 4fa979649cf412c491a1d9d42d1d0062b13ff55d Mon Sep 17 00:00:00 2001
    From: Worty <6840978+Worty@users.noreply.github.com>
    Date: Mon, 30 Jul 2018 16:28:19 +0200
    Subject: [PATCH 091/104] also fixed "Watt per square metre"
    
    ---
     public/app/core/utils/kbn.ts | 4 ++--
     1 file changed, 2 insertions(+), 2 deletions(-)
    
    diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
    index 7bf2cdc5fd67c..c2764670b95c0 100644
    --- a/public/app/core/utils/kbn.ts
    +++ b/public/app/core/utils/kbn.ts
    @@ -500,7 +500,7 @@ kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W');
     kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1);
     kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1);
     kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1);
    -kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m2');
    +kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m²');
     kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA');
     kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1);
     kbn.valueFormats.voltampreact = kbn.formatBuilders.decimalSIPrefix('var');
    @@ -1021,7 +1021,7 @@ kbn.getUnitFormats = function() {
             { text: 'Watt (W)', value: 'watt' },
             { text: 'Kilowatt (kW)', value: 'kwatt' },
             { text: 'Milliwatt (mW)', value: 'mwatt' },
    -        { text: 'Watt per square metre (W/m2)', value: 'Wm2' },
    +        { text: 'Watt per square metre (W/m²)', value: 'Wm2' },
             { text: 'Volt-ampere (VA)', value: 'voltamp' },
             { text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' },
             { text: 'Volt-ampere reactive (var)', value: 'voltampreact' },
    
    From 88d8072be3cd17ee7461481f1c17c51e69ed36b3 Mon Sep 17 00:00:00 2001
    From: Jason Pereira 
    Date: Mon, 30 Jul 2018 15:51:15 +0100
    Subject: [PATCH 092/104] add aws_dx to cloudwatch datasource
    
    ---
     pkg/tsdb/cloudwatch/metric_find_query.go | 2 ++
     1 file changed, 2 insertions(+)
    
    diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go
    index 136ee241c2e5a..d2bd135ecc9ad 100644
    --- a/pkg/tsdb/cloudwatch/metric_find_query.go
    +++ b/pkg/tsdb/cloudwatch/metric_find_query.go
    @@ -46,6 +46,7 @@ func init() {
     		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
     		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
     		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
    +		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
     		"AWS/DynamoDB":       {"ConditionalCheckFailedRequests", "ConsumedReadCapacityUnits", "ConsumedWriteCapacityUnits", "OnlineIndexConsumedWriteCapacity", "OnlineIndexPercentageProgress", "OnlineIndexThrottleEvents", "ProvisionedReadCapacityUnits", "ProvisionedWriteCapacityUnits", "ReadThrottleEvents", "ReturnedBytes", "ReturnedItemCount", "ReturnedRecordsCount", "SuccessfulRequestLatency", "SystemErrors", "TimeToLiveDeletedItemCount", "ThrottledRequests", "UserErrors", "WriteThrottleEvents"},
     		"AWS/EBS":            {"VolumeReadBytes", "VolumeWriteBytes", "VolumeReadOps", "VolumeWriteOps", "VolumeTotalReadTime", "VolumeTotalWriteTime", "VolumeIdleTime", "VolumeQueueLength", "VolumeThroughputPercentage", "VolumeConsumedReadWriteOps", "BurstBalance"},
     		"AWS/EC2":            {"CPUCreditUsage", "CPUCreditBalance", "CPUUtilization", "DiskReadOps", "DiskWriteOps", "DiskReadBytes", "DiskWriteBytes", "NetworkIn", "NetworkOut", "NetworkPacketsIn", "NetworkPacketsOut", "StatusCheckFailed", "StatusCheckFailed_Instance", "StatusCheckFailed_System"},
    @@ -118,6 +119,7 @@ func init() {
     		"AWS/CloudFront":       {"DistributionId", "Region"},
     		"AWS/CloudSearch":      {},
     		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
    +		"AWS/DX":               {"ConnectionId"},
     		"AWS/DynamoDB":         {"TableName", "GlobalSecondaryIndexName", "Operation", "StreamLabel"},
     		"AWS/EBS":              {"VolumeId"},
     		"AWS/EC2":              {"AutoScalingGroupName", "ImageId", "InstanceId", "InstanceType"},
    
    From 162d3e8036f8365e294502b6dcd496518c951a5b Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 17:03:01 +0200
    Subject: [PATCH 093/104] changelog: add notes about closing #12727
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index b8f5bced97271..c2e8c5c788e73 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -16,6 +16,7 @@
     * **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
     * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
     * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
    +* **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
     * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
     * **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
     * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
    
    From ad84a145f56f1fc1a8d513014c05ef40326f89a4 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 17:03:24 +0200
    Subject: [PATCH 094/104] changelog: add notes about closing #12744
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index c2e8c5c788e73..11baca97714d6 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -26,6 +26,7 @@
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    +* **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
     
     # 5.2.2 (2018-07-25)
     
    
    From e4c2476f3c898879fa6be89c18e1ea325bf88c13 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Tue, 31 Jul 2018 09:35:08 +0200
    Subject: [PATCH 095/104] Weird execution order for the tests...
    
    ---
     .../datasource/prometheus/datasource.ts       |  7 +++++-
     .../prometheus/result_transformer.ts          |  7 +++++-
     .../prometheus/specs/_datasource.jest.ts      | 25 +++----------------
     3 files changed, 15 insertions(+), 24 deletions(-)
    
    diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts
    index 75a946d6f3687..6801a9a1d5920 100644
    --- a/public/app/plugins/datasource/prometheus/datasource.ts
    +++ b/public/app/plugins/datasource/prometheus/datasource.ts
    @@ -175,8 +175,12 @@ export class PrometheusDatasource {
               responseIndex: index,
               refId: activeTargets[index].refId,
             };
    -
    +        console.log('format: ' + transformerOptions.format);
    +        console.log('resultType: ' + response.data.data.resultType);
    +        console.log('legendFormat: ' + transformerOptions.legendFormat);
    +        // console.log(result);
             this.resultTransformer.transform(result, response, transformerOptions);
    +        // console.log(result);
           });
     
           return { data: result };
    @@ -233,6 +237,7 @@ export class PrometheusDatasource {
         if (start > end) {
           throw { message: 'Invalid time range' };
         }
    +    // console.log(query.expr);
     
         var url = '/api/v1/query_range';
         var data = {
    diff --git a/public/app/plugins/datasource/prometheus/result_transformer.ts b/public/app/plugins/datasource/prometheus/result_transformer.ts
    index b6d8a32af5f50..4b69cb98c5474 100644
    --- a/public/app/plugins/datasource/prometheus/result_transformer.ts
    +++ b/public/app/plugins/datasource/prometheus/result_transformer.ts
    @@ -6,7 +6,9 @@ export class ResultTransformer {
     
       transform(result: any, response: any, options: any) {
         let prometheusResult = response.data.data.result;
    -
    +    console.log(prometheusResult);
    +    // console.log(options);
    +    // console.log(result);
         if (options.format === 'table') {
           result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId));
         } else if (options.format === 'heatmap') {
    @@ -26,6 +28,7 @@ export class ResultTransformer {
             }
           }
         }
    +    // console.log(result);
       }
     
       transformMetricData(metricData, options, start, end) {
    @@ -137,6 +140,7 @@ export class ResultTransformer {
         if (!label || label === '{}') {
           label = options.query;
         }
    +    console.log(label);
         return label;
       }
     
    @@ -156,6 +160,7 @@ export class ResultTransformer {
         var labelPart = _.map(_.toPairs(labelData), function(label) {
           return label[0] + '="' + label[1] + '"';
         }).join(',');
    +    console.log(metricName);
         return metricName + '{' + labelPart + '}';
       }
     
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    index 384abc8f90274..34f78585d76f5 100644
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -21,23 +21,7 @@ let backendSrv = {
     };
     
     let templateSrv = {
    -  replace: (target, scopedVars, format) => {
    -    if (!target) {
    -      return target;
    -    }
    -    let variable, value, fmt;
    -
    -    return target.replace(scopedVars, (match, var1, var2, fmt2, var3, fmt3) => {
    -      variable = this.index[var1 || var2 || var3];
    -      fmt = fmt2 || fmt3 || format;
    -      if (scopedVars) {
    -        value = scopedVars[var1 || var2 || var3];
    -        if (value) {
    -          return this.formatValue(value.value, fmt, variable);
    -        }
    -      }
    -    });
    -  },
    +  replace: jest.fn(str => str),
     };
     
     let timeSrv = {
    @@ -63,10 +47,7 @@ describe('PrometheusDatasource', function() {
       //     })
       //   );
     
    -  beforeEach(() => {
    -    ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -  });
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    +  describe('When querying prometheus with one target using query editor target spec', async () => {
         var results;
         var query = {
           range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    @@ -106,7 +87,7 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
         });
    -    it('should return series list', function() {
    +    it('should return series list', async () => {
           expect(results.data.length).toBe(1);
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });
    
    From f1f0400769f01c99101914cb1ba62cca0e64ac94 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Tue, 31 Jul 2018 11:41:58 +0200
    Subject: [PATCH 096/104] changelog: add notes about closing #12300
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index 11baca97714d6..d3532ebe64023 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -23,6 +23,7 @@
     * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
     * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
    +* **Cloudwatch**: AWS/AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    
    From 276a5e6eb5603df07d48aa66af4763bc9f3576c8 Mon Sep 17 00:00:00 2001
    From: =?UTF-8?q?Torkel=20=C3=96degaard?= 
    Date: Tue, 31 Jul 2018 17:29:02 +0200
    Subject: [PATCH 097/104] fix: test data api route used old name for test data
     datasource, fixes #12773
    
    ---
     pkg/api/metrics.go | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go
    index 00ad25ab8c2f9..f2bc79df7ad61 100644
    --- a/pkg/api/metrics.go
    +++ b/pkg/api/metrics.go
    @@ -99,7 +99,7 @@ func GetTestDataRandomWalk(c *m.ReqContext) Response {
     	timeRange := tsdb.NewTimeRange(from, to)
     	request := &tsdb.TsdbQuery{TimeRange: timeRange}
     
    -	dsInfo := &m.DataSource{Type: "grafana-testdata-datasource"}
    +	dsInfo := &m.DataSource{Type: "testdata"}
     	request.Queries = append(request.Queries, &tsdb.Query{
     		RefId:      "A",
     		IntervalMs: intervalMs,
    
    From 89eae1566d036e153aea18eb62e983bc21bd315f Mon Sep 17 00:00:00 2001
    From: =?UTF-8?q?Torkel=20=C3=96degaard?= 
    Date: Tue, 31 Jul 2018 17:31:45 +0200
    Subject: [PATCH 098/104] fix: team email tooltip was not showing
    
    ---
     public/app/core/components/Forms/Forms.tsx | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/public/app/core/components/Forms/Forms.tsx b/public/app/core/components/Forms/Forms.tsx
    index 4b74d48ba08bf..543e1a1d6dfbf 100644
    --- a/public/app/core/components/Forms/Forms.tsx
    +++ b/public/app/core/components/Forms/Forms.tsx
    @@ -12,7 +12,7 @@ export const Label: SFC = props => {
         
           {props.children}
           {props.tooltip && (
    -        
    +        
               
             
           )}
    
    From 6df3722a35faf455e2d25989a80a8e167531b5b7 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Tue, 31 Jul 2018 18:01:36 +0200
    Subject: [PATCH 099/104] changelog: add notes about closing #12762
    
    [skip ci]
    ---
     CHANGELOG.md | 3 ++-
     1 file changed, 2 insertions(+), 1 deletion(-)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index d3532ebe64023..dde7ead6f1362 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -23,7 +23,8 @@
     * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
     * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
    -* **Cloudwatch**: AWS/AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
    +* **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
     +* **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pull/12762), thx [@mindriot88](https://github.com/mindriot88)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    
    From d6158bc2935ec396f45114d736e684bb3a522c6b Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 09:30:26 +0200
    Subject: [PATCH 100/104] All tests passing
    
    ---
     .../datasource/prometheus/datasource.ts       |   6 -
     .../prometheus/result_transformer.ts          |   7 +-
     .../prometheus/specs/_datasource.jest.ts      | 317 ++++----
     .../prometheus/specs/datasource_specs.ts      | 683 ------------------
     4 files changed, 188 insertions(+), 825 deletions(-)
     delete mode 100644 public/app/plugins/datasource/prometheus/specs/datasource_specs.ts
    
    diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts
    index 6801a9a1d5920..ac8d774db5914 100644
    --- a/public/app/plugins/datasource/prometheus/datasource.ts
    +++ b/public/app/plugins/datasource/prometheus/datasource.ts
    @@ -175,12 +175,7 @@ export class PrometheusDatasource {
               responseIndex: index,
               refId: activeTargets[index].refId,
             };
    -        console.log('format: ' + transformerOptions.format);
    -        console.log('resultType: ' + response.data.data.resultType);
    -        console.log('legendFormat: ' + transformerOptions.legendFormat);
    -        // console.log(result);
             this.resultTransformer.transform(result, response, transformerOptions);
    -        // console.log(result);
           });
     
           return { data: result };
    @@ -237,7 +232,6 @@ export class PrometheusDatasource {
         if (start > end) {
           throw { message: 'Invalid time range' };
         }
    -    // console.log(query.expr);
     
         var url = '/api/v1/query_range';
         var data = {
    diff --git a/public/app/plugins/datasource/prometheus/result_transformer.ts b/public/app/plugins/datasource/prometheus/result_transformer.ts
    index 4b69cb98c5474..b6d8a32af5f50 100644
    --- a/public/app/plugins/datasource/prometheus/result_transformer.ts
    +++ b/public/app/plugins/datasource/prometheus/result_transformer.ts
    @@ -6,9 +6,7 @@ export class ResultTransformer {
     
       transform(result: any, response: any, options: any) {
         let prometheusResult = response.data.data.result;
    -    console.log(prometheusResult);
    -    // console.log(options);
    -    // console.log(result);
    +
         if (options.format === 'table') {
           result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId));
         } else if (options.format === 'heatmap') {
    @@ -28,7 +26,6 @@ export class ResultTransformer {
             }
           }
         }
    -    // console.log(result);
       }
     
       transformMetricData(metricData, options, start, end) {
    @@ -140,7 +137,6 @@ export class ResultTransformer {
         if (!label || label === '{}') {
           label = options.query;
         }
    -    console.log(label);
         return label;
       }
     
    @@ -160,7 +156,6 @@ export class ResultTransformer {
         var labelPart = _.map(_.toPairs(labelData), function(label) {
           return label[0] + '="' + label[1] + '"';
         }).join(',');
    -    console.log(metricName);
         return metricName + '{' + labelPart + '}';
       }
     
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    index 34f78585d76f5..2deab13a10108 100644
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -1,6 +1,7 @@
     import moment from 'moment';
     import { PrometheusDatasource } from '../datasource';
     import $q from 'q';
    +import { angularMocks } from 'test/lib/common';
     
     const SECOND = 1000;
     const MINUTE = 60 * SECOND;
    @@ -57,32 +58,31 @@ describe('PrometheusDatasource', function() {
         // Interval alignment with step
         var urlExpected =
           'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    -    var response = {
    -      data: {
    -        status: 'success',
    +
    +    beforeEach(async () => {
    +      let response = {
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              values: [[60, '3846']],
    -            },
    -          ],
    +          status: 'success',
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[60, '3846']],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    -      //   ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    +      };
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
           await ctx.ds.query(query).then(function(data) {
             results = data;
           });
    -      //   ctx.$httpBackend.flush();
         });
    +
         it('should generate the correct query', function() {
    -      //   ctx.$httpBackend.verifyNoOutstandingExpectation();
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
    @@ -97,39 +97,33 @@ describe('PrometheusDatasource', function() {
         var start = 60;
         var end = 360;
         var step = 60;
    -    // var urlExpected =
    -    //   'proxied/api/v1/query_range?query=' +
    -    //   encodeURIComponent('test{job="testjob"}') +
    -    //   '&start=' +
    -    //   start +
    -    //   '&end=' +
    -    //   end +
    -    //   '&step=' +
    -    //   step;
    +
         var query = {
           range: { from: time({ seconds: start }), to: time({ seconds: end }) },
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    -              values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    -            },
    -            {
    -              metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    -              values: [[start + step * 2, '4846']],
    -            },
    -          ],
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    +                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    +              },
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    +                values: [[start + step * 2, '4846']],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
    +
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
    @@ -137,11 +131,13 @@ describe('PrometheusDatasource', function() {
             results = data;
           });
         });
    +
         it('should be same length', function() {
           expect(results.data.length).toBe(2);
           expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
           expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
         });
    +
         it('should fill null until first datapoint in response', function() {
           expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
           expect(results.data[0].datapoints[0][0]).toBe(null);
    @@ -172,21 +168,23 @@ describe('PrometheusDatasource', function() {
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'vector',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              value: [123, '3846'],
    -            },
    -          ],
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
    +
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
    @@ -206,10 +204,7 @@ describe('PrometheusDatasource', function() {
       });
       describe('When performing annotationQuery', function() {
         var results;
    -    // var urlExpected =
    -    //   'proxied/api/v1/query_range?query=' +
    -    //   encodeURIComponent('ALERTS{alertstate="firing"}') +
    -    //   '&start=60&end=180&step=60';
    +
         var options = {
           annotation: {
             expr: 'ALERTS{alertstate="firing"}',
    @@ -222,27 +217,29 @@ describe('PrometheusDatasource', function() {
             to: time({ seconds: 123 }),
           },
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: {
    -                __name__: 'ALERTS',
    -                alertname: 'InstanceDown',
    -                alertstate: 'firing',
    -                instance: 'testinstance',
    -                job: 'testjob',
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: {
    +                  __name__: 'ALERTS',
    +                  alertname: 'InstanceDown',
    +                  alertstate: 'firing',
    +                  instance: 'testinstance',
    +                  job: 'testjob',
    +                },
    +                values: [[123, '1']],
                   },
    -              values: [[123, '1']],
    -            },
    -          ],
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
    +
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
    @@ -262,28 +259,29 @@ describe('PrometheusDatasource', function() {
     
       describe('When resultFormat is table and instant = true', function() {
         var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    // var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
         var query = {
           range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'vector',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              value: [123, '3846'],
    -            },
    -          ],
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    +      };
     
    -    beforeEach(async () => {
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query).then(function(data) {
    @@ -520,9 +518,13 @@ describe('PrometheusDatasource', function() {
               __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
             },
           };
    +
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
     
    +      templateSrv.replace = jest.fn(str => str);
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
    @@ -530,10 +532,16 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('10s');
    -      expect(query.scopedVars.__interval.value).toBe('10s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
         });
         it('should be min interval when it is greater than auto interval', async () => {
           var query = {
    @@ -552,18 +560,27 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
         it('should account for intervalFactor', async () => {
           var query = {
    @@ -583,14 +600,28 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=0&end=500&step=100';
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
    +
           expect(query.scopedVars.__interval.text).toBe('10s');
           expect(query.scopedVars.__interval.value).toBe('10s');
           expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    @@ -614,7 +645,11 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=50&end=450&step=50';
    +
    +      templateSrv.replace = jest.fn(str => str);
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
    @@ -622,10 +657,16 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
         it('should be min interval when greater than interval * intervalFactor', async () => {
           var query = {
    @@ -645,7 +686,9 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=15';
     
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    @@ -654,10 +697,16 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
         it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
           var query = {
    @@ -679,23 +728,30 @@ describe('PrometheusDatasource', function() {
           var start = 0;
           var urlExpected =
             'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[60s])') +
    +        encodeURIComponent('rate(test[$__interval])') +
             '&start=' +
             start +
             '&end=' +
             end +
             '&step=60';
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
       });
     });
    @@ -738,21 +794,22 @@ describe('PrometheusDatasource for POST', function() {
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              values: [[2 * 60, '3846']],
    -            },
    -          ],
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[2 * 60, '3846']],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query).then(function(data) {
    diff --git a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts
    deleted file mode 100644
    index c5da671b75768..0000000000000
    --- a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts
    +++ /dev/null
    @@ -1,683 +0,0 @@
    -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
    -import moment from 'moment';
    -import $ from 'jquery';
    -import helpers from 'test/specs/helpers';
    -import { PrometheusDatasource } from '../datasource';
    -
    -const SECOND = 1000;
    -const MINUTE = 60 * SECOND;
    -const HOUR = 60 * MINUTE;
    -
    -const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    -
    -describe('PrometheusDatasource', function() {
    -  var ctx = new helpers.ServiceTestContext();
    -  var instanceSettings = {
    -    url: 'proxied',
    -    directUrl: 'direct',
    -    user: 'test',
    -    password: 'mupp',
    -    jsonData: { httpMethod: 'GET' },
    -  };
    -
    -  beforeEach(angularMocks.module('grafana.core'));
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  beforeEach(
    -    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -      ctx.$q = $q;
    -      ctx.$httpBackend = $httpBackend;
    -      ctx.$rootScope = $rootScope;
    -      ctx.ds = $injector.instantiate(PrometheusDatasource, {
    -        instanceSettings: instanceSettings,
    -      });
    -      $httpBackend.when('GET', /\.html$/).respond('');
    -    })
    -  );
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    -    var results;
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    // Interval alignment with step
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            values: [[60, '3846']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should generate the correct query', function() {
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).to.be(1);
    -      expect(results.data[0].target).to.be('test{job="testjob"}');
    -    });
    -  });
    -  describe('When querying prometheus with one target which return multiple series', function() {
    -    var results;
    -    var start = 60;
    -    var end = 360;
    -    var step = 60;
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' +
    -      encodeURIComponent('test{job="testjob"}') +
    -      '&start=' +
    -      start +
    -      '&end=' +
    -      end +
    -      '&step=' +
    -      step;
    -    var query = {
    -      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    -            values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    -          },
    -          {
    -            metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    -            values: [[start + step * 2, '4846']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should be same length', function() {
    -      expect(results.data.length).to.be(2);
    -      expect(results.data[0].datapoints.length).to.be((end - start) / step + 1);
    -      expect(results.data[1].datapoints.length).to.be((end - start) / step + 1);
    -    });
    -    it('should fill null until first datapoint in response', function() {
    -      expect(results.data[0].datapoints[0][1]).to.be(start * 1000);
    -      expect(results.data[0].datapoints[0][0]).to.be(null);
    -      expect(results.data[0].datapoints[1][1]).to.be((start + step * 1) * 1000);
    -      expect(results.data[0].datapoints[1][0]).to.be(3846);
    -    });
    -    it('should fill null after last datapoint in response', function() {
    -      var length = (end - start) / step + 1;
    -      expect(results.data[0].datapoints[length - 2][1]).to.be((end - step * 1) * 1000);
    -      expect(results.data[0].datapoints[length - 2][0]).to.be(3848);
    -      expect(results.data[0].datapoints[length - 1][1]).to.be(end * 1000);
    -      expect(results.data[0].datapoints[length - 1][0]).to.be(null);
    -    });
    -    it('should fill null at gap between series', function() {
    -      expect(results.data[0].datapoints[2][1]).to.be((start + step * 2) * 1000);
    -      expect(results.data[0].datapoints[2][0]).to.be(null);
    -      expect(results.data[1].datapoints[1][1]).to.be((start + step * 1) * 1000);
    -      expect(results.data[1].datapoints[1][0]).to.be(null);
    -      expect(results.data[1].datapoints[3][1]).to.be((start + step * 3) * 1000);
    -      expect(results.data[1].datapoints[3][0]).to.be(null);
    -    });
    -  });
    -  describe('When querying prometheus with one target and instant = true', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'vector',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            value: [123, '3846'],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should generate the correct query', function() {
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).to.be(1);
    -      expect(results.data[0].target).to.be('test{job="testjob"}');
    -    });
    -  });
    -  describe('When performing annotationQuery', function() {
    -    var results;
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' +
    -      encodeURIComponent('ALERTS{alertstate="firing"}') +
    -      '&start=60&end=180&step=60';
    -    var options = {
    -      annotation: {
    -        expr: 'ALERTS{alertstate="firing"}',
    -        tagKeys: 'job',
    -        titleFormat: '{{alertname}}',
    -        textFormat: '{{instance}}',
    -      },
    -      range: {
    -        from: time({ seconds: 63 }),
    -        to: time({ seconds: 123 }),
    -      },
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: {
    -              __name__: 'ALERTS',
    -              alertname: 'InstanceDown',
    -              alertstate: 'firing',
    -              instance: 'testinstance',
    -              job: 'testjob',
    -            },
    -            values: [[123, '1']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.annotationQuery(options).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should return annotation list', function() {
    -      ctx.$rootScope.$apply();
    -      expect(results.length).to.be(1);
    -      expect(results[0].tags).to.contain('testjob');
    -      expect(results[0].title).to.be('InstanceDown');
    -      expect(results[0].text).to.be('testinstance');
    -      expect(results[0].time).to.be(123 * 1000);
    -    });
    -  });
    -
    -  describe('When resultFormat is table and instant = true', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'vector',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            value: [123, '3846'],
    -          },
    -        ],
    -      },
    -    };
    -
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -
    -    it('should return result', () => {
    -      expect(results).not.to.be(null);
    -    });
    -  });
    -
    -  describe('The "step" query parameter', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [],
    -      },
    -    };
    -
    -    it('should be min interval when greater than auto interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -
    -    it('step should never go below 1', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '100ms',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -
    -    it('should be auto interval when greater than min interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should result in querying fewer than 11000 data points', function() {
    -      var query = {
    -        // 6 hour range
    -        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '1s',
    -      };
    -      var end = 7 * 60 * 60;
    -      var start = 60 * 60;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should not apply min interval when interval * intervalFactor greater', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      // times get rounded up to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should apply min interval when interval * intervalFactor smaller', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should apply intervalFactor to auto interval when greater', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      // times get aligned to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should not not be affected by the 11000 data points limit when large enough', function() {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should be determined by the 11000 data points limit when too small', function() {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -  });
    -
    -  describe('The __interval and __interval_ms template variables', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [],
    -      },
    -    };
    -
    -    it('should be unchanged when auto interval is greater than min interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('10s');
    -      expect(query.scopedVars.__interval.value).to.be('10s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000);
    -    });
    -    it('should be min interval when it is greater than auto interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -    it('should account for intervalFactor', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('10s');
    -      expect(query.scopedVars.__interval.value).to.be('10s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000);
    -    });
    -    it('should be interval * intervalFactor when greater than min interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -    it('should be min interval when greater than interval * intervalFactor', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -    it('should be determined by the 11000 data points limit, accounting for intervalFactor', function() {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[60s])') +
    -        '&start=' +
    -        start +
    -        '&end=' +
    -        end +
    -        '&step=60';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -  });
    -});
    -
    -describe('PrometheusDatasource for POST', function() {
    -  var ctx = new helpers.ServiceTestContext();
    -  var instanceSettings = {
    -    url: 'proxied',
    -    directUrl: 'direct',
    -    user: 'test',
    -    password: 'mupp',
    -    jsonData: { httpMethod: 'POST' },
    -  };
    -
    -  beforeEach(angularMocks.module('grafana.core'));
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  beforeEach(
    -    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -      ctx.$q = $q;
    -      ctx.$httpBackend = $httpBackend;
    -      ctx.$rootScope = $rootScope;
    -      ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
    -      $httpBackend.when('GET', /\.html$/).respond('');
    -    })
    -  );
    -
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query_range';
    -    var dataExpected = $.param({
    -      query: 'test{job="testjob"}',
    -      start: 1 * 60,
    -      end: 3 * 60,
    -      step: 60,
    -    });
    -    var query = {
    -      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            values: [[2 * 60, '3846']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expectPOST(urlExpected, dataExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should generate the correct query', function() {
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).to.be(1);
    -      expect(results.data[0].target).to.be('test{job="testjob"}');
    -    });
    -  });
    -});
    
    From 790aadf8ef3544eb0c1007042525c7ad54f611e2 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 10:09:05 +0200
    Subject: [PATCH 101/104] Remove angularMocks
    
    ---
     .../app/plugins/datasource/prometheus/specs/_datasource.jest.ts  | 1 -
     1 file changed, 1 deletion(-)
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    index 2deab13a10108..efe2738cce94c 100644
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -1,7 +1,6 @@
     import moment from 'moment';
     import { PrometheusDatasource } from '../datasource';
     import $q from 'q';
    -import { angularMocks } from 'test/lib/common';
     
     const SECOND = 1000;
     const MINUTE = 60 * SECOND;
    
    From 8d0c4cdc09c04a05f20d3988380613a3f9f1e87f Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Wed, 1 Aug 2018 12:30:50 +0200
    Subject: [PATCH 102/104] changelog: add notes about closing #12561
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index dde7ead6f1362..aa089b5900b05 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -25,6 +25,7 @@
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
     * **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
     * **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
    +* **Cloudwatch**: Added BurstBalance metric to list of AWS RDS metrics [#12561](https://github.com/grafana/grafana/pulls/12561), thx [@activeshadow](https://github.com/activeshadow)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    
    From af32bfebefcc02170fbaa4104ae2e5883b5c1ba8 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 14:26:29 +0200
    Subject: [PATCH 103/104] Add all tests to one file
    
    ---
     .../prometheus/specs/_datasource.jest.ts      | 829 ------------------
     .../prometheus/specs/datasource.jest.ts       | 794 +++++++++++++++++
     2 files changed, 794 insertions(+), 829 deletions(-)
     delete mode 100644 public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    deleted file mode 100644
    index efe2738cce94c..0000000000000
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ /dev/null
    @@ -1,829 +0,0 @@
    -import moment from 'moment';
    -import { PrometheusDatasource } from '../datasource';
    -import $q from 'q';
    -
    -const SECOND = 1000;
    -const MINUTE = 60 * SECOND;
    -const HOUR = 60 * MINUTE;
    -
    -const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    -
    -let ctx = {};
    -let instanceSettings = {
    -  url: 'proxied',
    -  directUrl: 'direct',
    -  user: 'test',
    -  password: 'mupp',
    -  jsonData: { httpMethod: 'GET' },
    -};
    -let backendSrv = {
    -  datasourceRequest: jest.fn(),
    -};
    -
    -let templateSrv = {
    -  replace: jest.fn(str => str),
    -};
    -
    -let timeSrv = {
    -  timeRange: () => {
    -    return { to: { diff: () => 2000 }, from: '' };
    -  },
    -};
    -
    -describe('PrometheusDatasource', function() {
    -  //   beforeEach(angularMocks.module('grafana.core'));
    -  //   beforeEach(angularMocks.module('grafana.services'));
    -  //   beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  //   beforeEach(
    -  //     angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -  //       ctx.$q = $q;
    -  //       ctx.$httpBackend = $httpBackend;
    -  //       ctx.$rootScope = $rootScope;
    -  //       ctx.ds = $injector.instantiate(PrometheusDatasource, {
    -  //         instanceSettings: instanceSettings,
    -  //       });
    -  //       $httpBackend.when('GET', /\.html$/).respond('');
    -  //     })
    -  //   );
    -
    -  describe('When querying prometheus with one target using query editor target spec', async () => {
    -    var results;
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    // Interval alignment with step
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    -
    -    beforeEach(async () => {
    -      let response = {
    -        data: {
    -          status: 'success',
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                values: [[60, '3846']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -
    -    it('should generate the correct query', function() {
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should return series list', async () => {
    -      expect(results.data.length).toBe(1);
    -      expect(results.data[0].target).toBe('test{job="testjob"}');
    -    });
    -  });
    -  describe('When querying prometheus with one target which return multiple series', function() {
    -    var results;
    -    var start = 60;
    -    var end = 360;
    -    var step = 60;
    -
    -    var query = {
    -      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    -                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    -              },
    -              {
    -                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    -                values: [[start + step * 2, '4846']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -
    -    it('should be same length', function() {
    -      expect(results.data.length).toBe(2);
    -      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
    -      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
    -    });
    -
    -    it('should fill null until first datapoint in response', function() {
    -      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
    -      expect(results.data[0].datapoints[0][0]).toBe(null);
    -      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
    -      expect(results.data[0].datapoints[1][0]).toBe(3846);
    -    });
    -    it('should fill null after last datapoint in response', function() {
    -      var length = (end - start) / step + 1;
    -      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
    -      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
    -      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
    -      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
    -    });
    -    it('should fill null at gap between series', function() {
    -      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
    -      expect(results.data[0].datapoints[2][0]).toBe(null);
    -      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    -      expect(results.data[1].datapoints[1][0]).toBe(null);
    -      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
    -      expect(results.data[1].datapoints[3][0]).toBe(null);
    -    });
    -  });
    -  describe('When querying prometheus with one target and instant = true', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'vector',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                value: [123, '3846'],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -    it('should generate the correct query', function() {
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).toBe(1);
    -      expect(results.data[0].target).toBe('test{job="testjob"}');
    -    });
    -  });
    -  describe('When performing annotationQuery', function() {
    -    var results;
    -
    -    var options = {
    -      annotation: {
    -        expr: 'ALERTS{alertstate="firing"}',
    -        tagKeys: 'job',
    -        titleFormat: '{{alertname}}',
    -        textFormat: '{{instance}}',
    -      },
    -      range: {
    -        from: time({ seconds: 63 }),
    -        to: time({ seconds: 123 }),
    -      },
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: {
    -                  __name__: 'ALERTS',
    -                  alertname: 'InstanceDown',
    -                  alertstate: 'firing',
    -                  instance: 'testinstance',
    -                  job: 'testjob',
    -                },
    -                values: [[123, '1']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.annotationQuery(options).then(function(data) {
    -        results = data;
    -      });
    -    });
    -    it('should return annotation list', function() {
    -      //   ctx.$rootScope.$apply();
    -      expect(results.length).toBe(1);
    -      expect(results[0].tags).toContain('testjob');
    -      expect(results[0].title).toBe('InstanceDown');
    -      expect(results[0].text).toBe('testinstance');
    -      expect(results[0].time).toBe(123 * 1000);
    -    });
    -  });
    -
    -  describe('When resultFormat is table and instant = true', function() {
    -    var results;
    -    // var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'vector',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                value: [123, '3846'],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -
    -    it('should return result', () => {
    -      expect(results).not.toBe(null);
    -    });
    -  });
    -
    -  describe('The "step" query parameter', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        data: {
    -          resultType: 'matrix',
    -          result: [],
    -        },
    -      },
    -    };
    -
    -    it('should be min interval when greater than auto interval', async () => {
    -      let query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -
    -    it('step should never go below 1', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '100ms',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -
    -    it('should be auto interval when greater than min interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should result in querying fewer than 11000 data points', async () => {
    -      var query = {
    -        // 6 hour range
    -        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '1s',
    -      };
    -      var end = 7 * 60 * 60;
    -      var start = 60 * 60;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should not apply min interval when interval * intervalFactor greater', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      // times get rounded up to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should apply min interval when interval * intervalFactor smaller', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should apply intervalFactor to auto interval when greater', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      // times get aligned to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should not not be affected by the 11000 data points limit when large enough', async () => {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should be determined by the 11000 data points limit when too small', async () => {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -  });
    -
    -  describe('The __interval and __interval_ms template variables', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        data: {
    -          resultType: 'matrix',
    -          result: [],
    -        },
    -      },
    -    };
    -
    -    it('should be unchanged when auto interval is greater than min interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=60&end=420&step=10';
    -
    -      templateSrv.replace = jest.fn(str => str);
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '10s',
    -          value: '10s',
    -        },
    -        __interval_ms: {
    -          text: 10000,
    -          value: 10000,
    -        },
    -      });
    -    });
    -    it('should be min interval when it is greater than auto interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=60&end=420&step=10';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      templateSrv.replace = jest.fn(str => str);
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -    it('should account for intervalFactor', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=0&end=500&step=100';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      templateSrv.replace = jest.fn(str => str);
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '10s',
    -          value: '10s',
    -        },
    -        __interval_ms: {
    -          text: 10000,
    -          value: 10000,
    -        },
    -      });
    -
    -      expect(query.scopedVars.__interval.text).toBe('10s');
    -      expect(query.scopedVars.__interval.value).toBe('10s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    -    });
    -    it('should be interval * intervalFactor when greater than min interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=50&end=450&step=50';
    -
    -      templateSrv.replace = jest.fn(str => str);
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -    it('should be min interval when greater than interval * intervalFactor', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=60&end=420&step=15';
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=' +
    -        start +
    -        '&end=' +
    -        end +
    -        '&step=60';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      templateSrv.replace = jest.fn(str => str);
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -  });
    -});
    -
    -describe('PrometheusDatasource for POST', function() {
    -  //   var ctx = new helpers.ServiceTestContext();
    -  let instanceSettings = {
    -    url: 'proxied',
    -    directUrl: 'direct',
    -    user: 'test',
    -    password: 'mupp',
    -    jsonData: { httpMethod: 'POST' },
    -  };
    -
    -  //   beforeEach(angularMocks.module('grafana.core'));
    -  //   beforeEach(angularMocks.module('grafana.services'));
    -  //   beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  //   beforeEach(
    -  //     // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -  //     //   ctx.$q = $q;
    -  //     //   ctx.$httpBackend = $httpBackend;
    -  //     //   ctx.$rootScope = $rootScope;
    -  //     //   ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
    -  //     //   $httpBackend.when('GET', /\.html$/).respond('');
    -  //     // })
    -  //   );
    -
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query_range';
    -    var dataExpected = {
    -      query: 'test{job="testjob"}',
    -      start: 1 * 60,
    -      end: 3 * 60,
    -      step: 60,
    -    };
    -    var query = {
    -      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                values: [[2 * 60, '3846']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -    it('should generate the correct query', function() {
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('POST');
    -      expect(res.url).toBe(urlExpected);
    -      expect(res.data).toEqual(dataExpected);
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).toBe(1);
    -      expect(results.data[0].target).toBe('test{job="testjob"}');
    -    });
    -  });
    -});
    diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    index b8b2b50f59095..f60af583f4515 100644
    --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    @@ -246,3 +246,797 @@ describe('PrometheusDatasource', () => {
         });
       });
     });
    +
    +const SECOND = 1000;
    +const MINUTE = 60 * SECOND;
    +const HOUR = 60 * MINUTE;
    +
    +const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    +
    +let ctx = {};
    +let instanceSettings = {
    +  url: 'proxied',
    +  directUrl: 'direct',
    +  user: 'test',
    +  password: 'mupp',
    +  jsonData: { httpMethod: 'GET' },
    +};
    +let backendSrv = {
    +  datasourceRequest: jest.fn(),
    +};
    +
    +let templateSrv = {
    +  replace: jest.fn(str => str),
    +};
    +
    +let timeSrv = {
    +  timeRange: () => {
    +    return { to: { diff: () => 2000 }, from: '' };
    +  },
    +};
    +
    +describe('PrometheusDatasource', function() {
    +  describe('When querying prometheus with one target using query editor target spec', async () => {
    +    var results;
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    // Interval alignment with step
    +    var urlExpected =
    +      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    +
    +    beforeEach(async () => {
    +      let response = {
    +        data: {
    +          status: 'success',
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[60, '3846']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', async () => {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When querying prometheus with one target which return multiple series', function() {
    +    var results;
    +    var start = 60;
    +    var end = 360;
    +    var step = 60;
    +
    +    var query = {
    +      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    +                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    +              },
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    +                values: [[start + step * 2, '4846']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should be same length', function() {
    +      expect(results.data.length).toBe(2);
    +      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
    +      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
    +    });
    +
    +    it('should fill null until first datapoint in response', function() {
    +      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
    +      expect(results.data[0].datapoints[0][0]).toBe(null);
    +      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[0].datapoints[1][0]).toBe(3846);
    +    });
    +    it('should fill null after last datapoint in response', function() {
    +      var length = (end - start) / step + 1;
    +      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
    +      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
    +      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
    +      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
    +    });
    +    it('should fill null at gap between series', function() {
    +      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
    +      expect(results.data[0].datapoints[2][0]).toBe(null);
    +      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[1].datapoints[1][0]).toBe(null);
    +      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
    +      expect(results.data[1].datapoints[3][0]).toBe(null);
    +    });
    +  });
    +  describe('When querying prometheus with one target and instant = true', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When performing annotationQuery', function() {
    +    var results;
    +
    +    var options = {
    +      annotation: {
    +        expr: 'ALERTS{alertstate="firing"}',
    +        tagKeys: 'job',
    +        titleFormat: '{{alertname}}',
    +        textFormat: '{{instance}}',
    +      },
    +      range: {
    +        from: time({ seconds: 63 }),
    +        to: time({ seconds: 123 }),
    +      },
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: {
    +                  __name__: 'ALERTS',
    +                  alertname: 'InstanceDown',
    +                  alertstate: 'firing',
    +                  instance: 'testinstance',
    +                  job: 'testjob',
    +                },
    +                values: [[123, '1']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.annotationQuery(options).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should return annotation list', function() {
    +      expect(results.length).toBe(1);
    +      expect(results[0].tags).toContain('testjob');
    +      expect(results[0].title).toBe('InstanceDown');
    +      expect(results[0].text).toBe('testinstance');
    +      expect(results[0].time).toBe(123 * 1000);
    +    });
    +  });
    +
    +  describe('When resultFormat is table and instant = true', function() {
    +    var results;
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should return result', () => {
    +      expect(results).not.toBe(null);
    +    });
    +  });
    +
    +  describe('The "step" query parameter', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be min interval when greater than auto interval', async () => {
    +      let query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('step should never go below 1', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '100ms',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('should be auto interval when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should result in querying fewer than 11000 data points', async () => {
    +      var query = {
    +        // 6 hour range
    +        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '1s',
    +      };
    +      var end = 7 * 60 * 60;
    +      var start = 60 * 60;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not apply min interval when interval * intervalFactor greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      // times get rounded up to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply min interval when interval * intervalFactor smaller', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply intervalFactor to auto interval when greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      // times get aligned to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not not be affected by the 11000 data points limit when large enough', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should be determined by the 11000 data points limit when too small', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +  });
    +
    +  describe('The __interval and __interval_ms template variables', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be unchanged when auto interval is greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
    +
    +      templateSrv.replace = jest.fn(str => str);
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
    +    });
    +    it('should be min interval when it is greater than auto interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +    it('should account for intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
    +
    +      expect(query.scopedVars.__interval.text).toBe('10s');
    +      expect(query.scopedVars.__interval.value).toBe('10s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +    });
    +    it('should be interval * intervalFactor when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=50&end=450&step=50';
    +
    +      templateSrv.replace = jest.fn(str => str);
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +    it('should be min interval when greater than interval * intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=15';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=' +
    +        start +
    +        '&end=' +
    +        end +
    +        '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +  });
    +});
    +
    +describe('PrometheusDatasource for POST', function() {
    +  //   var ctx = new helpers.ServiceTestContext();
    +  let instanceSettings = {
    +    url: 'proxied',
    +    directUrl: 'direct',
    +    user: 'test',
    +    password: 'mupp',
    +    jsonData: { httpMethod: 'POST' },
    +  };
    +
    +  describe('When querying prometheus with one target using query editor target spec', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query_range';
    +    var dataExpected = {
    +      query: 'test{job="testjob"}',
    +      start: 1 * 60,
    +      end: 3 * 60,
    +      step: 60,
    +    };
    +    var query = {
    +      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[2 * 60, '3846']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('POST');
    +      expect(res.url).toBe(urlExpected);
    +      expect(res.data).toEqual(dataExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +});
    
    From 951b623bd23ca1aa43833e2898876579c8417370 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 14:27:45 +0200
    Subject: [PATCH 104/104] Change to arrow functions
    
    ---
     .../prometheus/specs/datasource.jest.ts       | 66 +++++++++----------
     1 file changed, 33 insertions(+), 33 deletions(-)
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    index f60af583f4515..aeca8d6919134 100644
    --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    @@ -150,49 +150,49 @@ describe('PrometheusDatasource', () => {
         });
       });
     
    -  describe('alignRange', function() {
    -    it('does not modify already aligned intervals with perfect step', function() {
    +  describe('alignRange', () => {
    +    it('does not modify already aligned intervals with perfect step', () => {
           const range = alignRange(0, 3, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(3);
         });
    -    it('does modify end-aligned intervals to reflect number of steps possible', function() {
    +    it('does modify end-aligned intervals to reflect number of steps possible', () => {
           const range = alignRange(1, 6, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(6);
         });
    -    it('does align intervals that are a multiple of steps', function() {
    +    it('does align intervals that are a multiple of steps', () => {
           const range = alignRange(1, 4, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(6);
         });
    -    it('does align intervals that are not a multiple of steps', function() {
    +    it('does align intervals that are not a multiple of steps', () => {
           const range = alignRange(1, 5, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(6);
         });
       });
     
    -  describe('Prometheus regular escaping', function() {
    -    it('should not escape non-string', function() {
    +  describe('Prometheus regular escaping', () => {
    +    it('should not escape non-string', () => {
           expect(prometheusRegularEscape(12)).toEqual(12);
         });
    -    it('should not escape simple string', function() {
    +    it('should not escape simple string', () => {
           expect(prometheusRegularEscape('cryptodepression')).toEqual('cryptodepression');
         });
    -    it("should escape '", function() {
    +    it("should escape '", () => {
           expect(prometheusRegularEscape("looking'glass")).toEqual("looking\\\\'glass");
         });
    -    it('should escape multiple characters', function() {
    +    it('should escape multiple characters', () => {
           expect(prometheusRegularEscape("'looking'glass'")).toEqual("\\\\'looking\\\\'glass\\\\'");
         });
       });
     
    -  describe('Prometheus regexes escaping', function() {
    -    it('should not escape simple string', function() {
    +  describe('Prometheus regexes escaping', () => {
    +    it('should not escape simple string', () => {
           expect(prometheusSpecialRegexEscape('cryptodepression')).toEqual('cryptodepression');
         });
    -    it('should escape $^*+?.()\\', function() {
    +    it('should escape $^*+?.()\\', () => {
           expect(prometheusSpecialRegexEscape("looking'glass")).toEqual("looking\\\\'glass");
           expect(prometheusSpecialRegexEscape('looking{glass')).toEqual('looking\\\\{glass');
           expect(prometheusSpecialRegexEscape('looking}glass')).toEqual('looking\\\\}glass');
    @@ -208,7 +208,7 @@ describe('PrometheusDatasource', () => {
           expect(prometheusSpecialRegexEscape('looking)glass')).toEqual('looking\\\\)glass');
           expect(prometheusSpecialRegexEscape('looking\\glass')).toEqual('looking\\\\\\\\glass');
         });
    -    it('should escape multiple special characters', function() {
    +    it('should escape multiple special characters', () => {
           expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?');
         });
       });
    @@ -275,7 +275,7 @@ let timeSrv = {
       },
     };
     
    -describe('PrometheusDatasource', function() {
    +describe('PrometheusDatasource', () => {
       describe('When querying prometheus with one target using query editor target spec', async () => {
         var results;
         var query = {
    @@ -310,7 +310,7 @@ describe('PrometheusDatasource', function() {
           });
         });
     
    -    it('should generate the correct query', function() {
    +    it('should generate the correct query', () => {
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
    @@ -320,7 +320,7 @@ describe('PrometheusDatasource', function() {
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });
       });
    -  describe('When querying prometheus with one target which return multiple series', function() {
    +  describe('When querying prometheus with one target which return multiple series', () => {
         var results;
         var start = 60;
         var end = 360;
    @@ -360,26 +360,26 @@ describe('PrometheusDatasource', function() {
           });
         });
     
    -    it('should be same length', function() {
    +    it('should be same length', () => {
           expect(results.data.length).toBe(2);
           expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
           expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
         });
     
    -    it('should fill null until first datapoint in response', function() {
    +    it('should fill null until first datapoint in response', () => {
           expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
           expect(results.data[0].datapoints[0][0]).toBe(null);
           expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
           expect(results.data[0].datapoints[1][0]).toBe(3846);
         });
    -    it('should fill null after last datapoint in response', function() {
    +    it('should fill null after last datapoint in response', () => {
           var length = (end - start) / step + 1;
           expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
           expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
           expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
           expect(results.data[0].datapoints[length - 1][0]).toBe(null);
         });
    -    it('should fill null at gap between series', function() {
    +    it('should fill null at gap between series', () => {
           expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
           expect(results.data[0].datapoints[2][0]).toBe(null);
           expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    @@ -388,7 +388,7 @@ describe('PrometheusDatasource', function() {
           expect(results.data[1].datapoints[3][0]).toBe(null);
         });
       });
    -  describe('When querying prometheus with one target and instant = true', function() {
    +  describe('When querying prometheus with one target and instant = true', () => {
         var results;
         var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
         var query = {
    @@ -420,17 +420,17 @@ describe('PrometheusDatasource', function() {
             results = data;
           });
         });
    -    it('should generate the correct query', function() {
    +    it('should generate the correct query', () => {
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
         });
    -    it('should return series list', function() {
    +    it('should return series list', () => {
           expect(results.data.length).toBe(1);
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });
       });
    -  describe('When performing annotationQuery', function() {
    +  describe('When performing annotationQuery', () => {
         var results;
     
         var options = {
    @@ -475,7 +475,7 @@ describe('PrometheusDatasource', function() {
             results = data;
           });
         });
    -    it('should return annotation list', function() {
    +    it('should return annotation list', () => {
           expect(results.length).toBe(1);
           expect(results[0].tags).toContain('testjob');
           expect(results[0].title).toBe('InstanceDown');
    @@ -484,7 +484,7 @@ describe('PrometheusDatasource', function() {
         });
       });
     
    -  describe('When resultFormat is table and instant = true', function() {
    +  describe('When resultFormat is table and instant = true', () => {
         var results;
         var query = {
           range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    @@ -520,7 +520,7 @@ describe('PrometheusDatasource', function() {
         });
       });
     
    -  describe('The "step" query parameter', function() {
    +  describe('The "step" query parameter', () => {
         var response = {
           status: 'success',
           data: {
    @@ -717,7 +717,7 @@ describe('PrometheusDatasource', function() {
         });
       });
     
    -  describe('The __interval and __interval_ms template variables', function() {
    +  describe('The __interval and __interval_ms template variables', () => {
         var response = {
           status: 'success',
           data: {
    @@ -982,7 +982,7 @@ describe('PrometheusDatasource', function() {
       });
     });
     
    -describe('PrometheusDatasource for POST', function() {
    +describe('PrometheusDatasource for POST', () => {
       //   var ctx = new helpers.ServiceTestContext();
       let instanceSettings = {
         url: 'proxied',
    @@ -992,7 +992,7 @@ describe('PrometheusDatasource for POST', function() {
         jsonData: { httpMethod: 'POST' },
       };
     
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    +  describe('When querying prometheus with one target using query editor target spec', () => {
         var results;
         var urlExpected = 'proxied/api/v1/query_range';
         var dataExpected = {
    @@ -1028,13 +1028,13 @@ describe('PrometheusDatasource for POST', function() {
             results = data;
           });
         });
    -    it('should generate the correct query', function() {
    +    it('should generate the correct query', () => {
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('POST');
           expect(res.url).toBe(urlExpected);
           expect(res.data).toEqual(dataExpected);
         });
    -    it('should return series list', function() {
    +    it('should return series list', () => {
           expect(results.data.length).toBe(1);
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });