diff --git a/.build.ps1 b/.build.ps1 deleted file mode 100644 index bcd506c49f874..0000000000000 --- a/.build.ps1 +++ /dev/null @@ -1,354 +0,0 @@ -# Copyright 2020 PingCAP, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# See the License for the specific language governing permissions and -# limitations under the License. - -[CmdletBinding()] -param ( - [ValidateSet('Community', 'Enterprise')] - [string]$Edition = (property Edition Community), - - [string]$GO = (property GO 'go'), - [string]$TargetOS = (property GOOS ''), - [string]$Target = $IsWindows ? 'bin\tidb-server.exe' : 'bin\tidb-server', - [string]$BuildFlags = '', - - [switch]$Race = $false, - [switch]$Check = $false, - - [uint]$P = (property P 8), - [string]$ExplainTests = '' -) - -if (-not (Test-Path (Join-Path 'tools' 'bin'))) { - New-Item -ItemType Directory (Join-Path 'tools' 'bin') | Out-Null -} - -$gopath = Resolve-Path (& $GO env -json | ConvertFrom-Json).GOPATH -$packages = & $GO list ./... | Where-Object { $_ -notlike '*cmd*' } -$directories = $packages -replace 'github.com/pingcap/tidb/', '' -$sources = Get-ChildItem -Path $directories -Filter '*.go' - -$testFlags = @('-X', "'github.com/pingcap/tidb/config.checkBeforeDropLDFlag=1'") - -function Get-ToolPath ($name, $dir = (Resolve-Path 'tools\bin')) { - $exe = $IsWindows ? 
"$name.exe" : $name - Join-Path $dir -ChildPath $exe -} - -$tools = @{ - Path = Resolve-Path tools/check - Mod = Resolve-Path tools/check/go.mod - - FailPoint = @{ - Src = 'github.com/pingcap/failpoint/failpoint-ctl' - Path = (Get-ToolPath 'failpoint-ctl') - } - Errcheck = @{ - Src = 'github.com/kisielk/errcheck' - Path = (Get-ToolPath 'errcheck') - } - Revive = @{ - Src = 'github.com/mgechev/revive' - Path = (Get-ToolPath 'revive') - } - Unconvert = @{ - Src = 'github.com/mdempsky/unconvert' - Path = (Get-ToolPath 'unconvert') - } - StaticCheck = @{ - Src = 'honnef.co/go/tools/cmd/staticcheck' - Path = (Get-ToolPath 'staticcheck' (Join-Path $gopath bin)) - } - Linter = @{ - Path = (Get-ToolPath 'golangci-lint') - } -} - -task BuildFailPoint -Inputs go.mod -Outputs $tools.FailPoint.Path { - exec { & $GO build -o $tools.FailPoint.Path $tools.FailPoint.Src } -} - -function Enable-FailPoint { - Get-ChildItem . -Recurse -Directory | Where-Object { $_ -cnotmatch '(\.git|tools|\.idea)' } | - ForEach-Object { exec { & $tools.FailPoint.Path enable $_ } } -} - -task EnableFailPoint BuildFailPoint, { - Enable-FailPoint -} - -function Disable-FailPoint { - Get-ChildItem . 
-Recurse -Directory | Where-Object { $_ -cnotmatch '(\.git|tools|\.idea)' } | - ForEach-Object { exec { & $tools.FailPoint.Path disable $_ } } -} - -task DisableFailPoint BuildFailPoint, { - Disable-FailPoint -} - -task BuildErrcheck -Inputs $tools.Mod -Outputs $tools.Errcheck.Path { - Set-Location $tools.Path - exec { & $GO build -o $tools.Errcheck.Path $tools.Errcheck.Src } -} - -task RunErrcheck BuildErrcheck, { - $exclude = Join-Path $tools.Path errcheck_excludes.txt - exec { & $tools.Errcheck.Path -exclude $exclude -ignoretests -blank $packages } -} - -task BuildRevive -Inputs $tools.Mod -Outputs $tools.Revive.Path { - Set-Location $tools.Path - exec { & $GO build -o $tools.Revive.Path $tools.Revive.Src } -} - -task RunRevive BuildRevive, { - $config = Join-Path $tools.Path revive.toml - exec { & $tools.Revive.Path -formatter friendly -config $config $packages } -} - -task BuildUnconvert -Inputs $tools.Mod -Outputs $tools.Unconvert.Path { - Set-Location $tools.Path - exec { & $GO build -o $tools.Unconvert.Path $tools.Unconvert.Src } -} - -task RunUnconvert BuildUnconvert, { - exec { & $tools.Unconvert.Path ./... } -} - -task BuildStaticCheck -Inputs go.mod -Outputs $tools.StaticCheck.Path { - exec { & $GO get $tools.StaticCheck.Src } -} - -task RunStaticCheck BuildStaticCheck, { - exec { & $tools.StaticCheck.Path ./... } -} - -task DownloadLinter -If (-not (Test-Path $tools.Linter.Path)) { - $goEnv = exec { & $GO env -json } | ConvertFrom-Json - $version = '1.30.0' - $os = $goEnv.GOHOSTOS - $arch = $goEnv.GOHOSTARCH - $ext = ($os -eq 'windows') ? 'zip' : 'tar.gz' - - $dir = Join-Path ([System.IO.Path]::GetTempPath()) ([System.Guid]::NewGuid()) - New-Item -ItemType Directory -Path $dir | Out-Null - - $url = "https://github.com/golangci/golangci-lint/releases/download/v$version/golangci-lint-$version-$os-$arch.$ext" - $archive = Join-Path $dir "download.$ext" - Write-Output "downloading $url" - Invoke-WebRequest $url -OutFile $archive - - $IsWindows ? 
(Expand-Archive $archive $dir) : (exec { tar -C $dir -xzf $archive }) - $bin = $IsWindows ? 'golangci-lint.exe' : 'golangci-lint' - Copy-Item (Join-Path $dir "golangci-lint-$version-$os-$arch\$bin") $tools.Linter.Path - Remove-Item -Force -Recurse $dir -} - -task RunLinter DownloadLinter, { - exec { & $tools.Linter.Path run -v --disable-all --deadline=3m --enable=misspell --enable=ineffassign --enable=varcheck $directories } -} - -task GoModTidy { - exec { & $GO mod tidy } -} - -task CheckTestSuite { - Get-ChildItem . -Directory | Where-Object { $_ -notmatch 'vendor' } | - ForEach-Object { Get-ChildItem $_ -Recurse -Filter '*_test.go' } | - ForEach-Object { Select-String $_ -Pattern 'type (test.*Suite.*) struct' -CaseSensitive } | - ForEach-Object { - $dir = Split-Path $_.Path - $suite = $_.Matches.Groups[1].Value - $sources = Get-ChildItem $dir -Recurse -Filter '*_test.go' - $enabled = $sources | ForEach-Object { - Select-String $_ -Pattern "_ = (check\.)?(Suite|SerialSuites)\((&?$suite{|new\($suite\))" -CaseSensitive - } - if (-not $enabled) { - $hasCase = $sources | ForEach-Object { Select-String $_ -Pattern "func \((.* )?\*?$suite\) Test" -CaseSensitive } - if ($hasCase) { - throw "$suite in $dir is not enabled" - } - } - } -} - -task RunGoVet { - exec { go vet -all $packages } -} - -task FormatCode { - if ((Get-Content tidb-server\main.go -Raw) -match "\r\n$") { - Write-Build Red "Gofmt is skiped due to it will reformat CRLF. Please check your git core.autocrlf setting." - } - else { - exec { gofmt -s -l -w $directories } - } - Set-Location (Resolve-Path cmd\importcheck) - exec { & $GO run . ../.. } -} - -# Synopsis: Check code quality with some analyzers. -task Check FormatCode, RunErrCheck, RunUnconvert, RunRevive, GoModTidy, CheckTestSuite, RunLinter, RunGoVet, RunStaticCheck - -# Synopsis: Build TiDB server. 
-task Build -Inputs ($sources + 'go.mod', 'go.sum') -Outputs $Target { - $build = @('build', '-tags', 'codes', $BuildFlags) - if ($Race) { - $build += '-race' - } - - $version = (git describe --tags --dirty --always) - $gitHash = (git rev-parse HEAD) - $gitBranch = (git rev-parse --abbrev-ref HEAD) - $buildTime = (Get-Date -UFormat '+%Y-%m-%d %I:%M:%S') - - $flags = @( - '-X', "'github.com/pingcap/parser/mysql.TiDBReleaseVersion=$version'", - '-X', "'github.com/pingcap/tidb/util/versioninfo.TiDBGitHash=$gitHash'", - '-X', "'github.com/pingcap/tidb/util/versioninfo.TiDBGitBranch=$gitBranch'", - '-X', "'github.com/pingcap/tidb/util/versioninfo.TiDBBuildTS=$buildTime'" - '-X', "'github.com/pingcap/tidb/util/versioninfo.TiDBEdition=$Edition'" - ) - if ($Check) { - $flags += $testFlags - } - - $build += @( - '-ldflags', "`"$flags`"", - '-o', $Target, 'tidb-server/main.go' - ) - - $Task.Data = $env:GOOS - $env:GOOS = $TargetOS - exec { & $GO $build } -} -Done { - $env:GOOS = $Task.Data -} - -task BuildExplainTest -Inputs (Get-ChildItem cmd\explaintest\* -Include '*.go') -Outputs (Get-ToolPath 'explain_test' 'cmd\explaintest') { - Set-Location cmd\explaintest - $output = $IsWindows ? 'explain_test.exe' : 'explain_test' - exec { & $GO build -o $output } -} - -# Synopsis: Run explain tests. 
-task ExplainTest -If (-not ((Get-Content cmd\explaintest\r\explain.result -Raw) -match "\r\n$")) Build, BuildExplainTest, { - function Find-Prot { - while ($true) { - $port = Get-Random -Minimum 4000 -Maximum 65535 - $listener = [System.Net.Sockets.TcpListener]$port; - try { - $listener.Start() - return $port - } - catch { - continue - } - finally { - $listener.Stop() - } - } - } - - $tidbPath = Resolve-Path $Target - Set-Location cmd\explaintest - - $explaintest = if ($IsWindows) { '.\explain_test.exe' } else { Resolve-Path '.\explain_test' } - exec { & $GO build -o $explaintest } - - $port, $status = (Find-Prot), (Find-Prot) - - $logPath = 'explain-test.out' - $tidbArgs = @('-P', "$port", '-status', "$status", '-config', 'config.toml', '-store', 'mocktikv') - $tidb = Start-Process -FilePath $tidbPath -ArgumentList $tidbArgs -RedirectStandardOutput $logPath -NoNewWindow -PassThru - Write-Output "TiDB server(Handle: $($tidb.Handle)) started" - $Task.Data = $tidb - Start-Sleep 5 - - if ($ExplainTests -eq '') { - Write-Output 'run all explain test cases' - } - else { - Write-Output "run explain test cases: $ExplainTests" - } - exec { & $explaintest -port "$port" -status "$status" --log-level=error $ExplainTests } -} -Done { - if ($Task.Data) { - $Task.Data.Kill() - } -} - -# Synopsis: Run unit tests. -task GoTest BuildFailPoint, { - Enable-FailPoint - $Task.Data = @{ - logLevel = $env:log_level - tz = $env:TZ - } - $env:log_level = 'fatal' - $env:TZ = 'Asia/Shanghai' - exec { & $GO test -p $P -ldflags "`"$testFlags`"" -cover $packages '-check.p' true '-check.timeout' 4s } -} -Done { - Disable-FailPoint - $env:log_level = $Task.Data.logLevel - $env:TZ = $Task.Data.tz -} - -# Synopsis: Run tests with race detecter enabled. 
-task GoRaceTest BuildFailPoint, { - Enable-FailPoint - $Task.Data = @{ - logLevel = $env:log_level - tz = $env:TZ - } - $env:log_level = 'debug' - $env:TZ = 'Asia/Shanghai' - exec { & $GO test -p $P -timeout 20m -race $packages } -} -Done { - Disable-FailPoint - $env:log_level = $Task.Data.logLevel - $env:TZ = $Task.Data.tz -} - -# Synopsis: Run tests with leak checker enabled. -task GoLeakTest BuildFailPoint, { - Enable-FailPoint - $Task.Data = @{ - logLevel = $env:log_level - tz = $env:TZ - } - $env:log_level = 'debug' - $env:TZ = 'Asia/Shanghai' - exec { & $GO test -p $P -tags leak $packages } -} -Done { - Disable-FailPoint - $env:log_level = $Task.Data.logLevel - $env:TZ = $Task.Data.tz -} - -# Synopsis: Ensure generated code is up to date. -task GoGenerate { - exec { & $GO generate ./... } - if (exec { git status -s } | ForEach-Object { (-split $_)[1] } | - ForEach-Object { Select-String $_ -Pattern '^# Code generated .* DO NOT EDIT\.$' -CaseSensitive }) { - throw 'Your commit is changed after running go generate ./..., it should not happen.' - } -} - -# Synopsis: Run common tests. -task Test ExplainTest, GoTest, GoGenerate - -# Synopsis: Check and Test. -task Dev Check, Test - -# Synopsis: Build TiDB server. -task . Build diff --git a/.codecov.yml b/.codecov.yml index f2482097c10a9..07178456a6804 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -43,4 +43,5 @@ ignore: - "executor/seqtest/.*" - "metrics/.*" - "expression/generator/.*" + - "br/pkg/mock/.*" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000000..ccc42ebaa1ee0 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,4 @@ +# Require review from domain experts when the PR modified significant config files. 
+/sessionctx/variable @pingcap/tidb-configuration-reviewer +/config/config.toml.example @pingcap/tidb-configuration-reviewer +/session/bootstrap.go @pingcap/tidb-configuration-reviewer diff --git a/.github/licenserc.yml b/.github/licenserc.yml new file mode 100644 index 0000000000000..52976b36f8686 --- /dev/null +++ b/.github/licenserc.yml @@ -0,0 +1,31 @@ +header: + license: + spdx-id: Apache-2.0 + copyright-owner: PingCAP, Inc. + paths-ignore: + - 'docs/' + - 'br/' + - '.gitignore' + - '.gitattributes' + - '.golangci.yml' + - '.golangci_br.yml' + - 'LICENSES/' + - '**/*.md' + - '**/*.json' + - '**/*.pem' + - '**/*.crt' + - '**/*.test' + - '**/*.result' + - '**/*.example' + - '.codecov.yml' + - 'errors.toml' + - 'Jenkinsfile' + - '.editorconfig' + - 'hooks/pre-commit' + - '**/go.mod' + - '**/go.sum' + - 'LICENSE' + - '.github/' + - 'parser/' + - 'dumpling/' + comment: on-failure diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 6af9bfc5b25e4..b8f7a3b6eeca5 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,4 +1,6 @@ - ### What problem does this PR solve? + +Please create an issue first to describe the problem. -Problem Summary: +There MUST be one line starting with "Issue Number: " and +linking the relevant issues via the "close" or "ref". -### What is changed and how it works? +For more info, check https://pingcap.github.io/tidb-dev-guide/contribute-to-tidb/contribute-code.html#referring-to-an-issue. + +--> -Proposal: [xxx](url) +Issue Number: close #xxx -What's Changed: +Problem Summary: -How it Works: +### What is changed and how it works? 
-### Check List +### Check List Tests @@ -43,6 +49,10 @@ Documentation - [ ] Contains experimental features - [ ] Changes MySQL compatibility -### Release note +### Release note + + -- +```release-note +None +``` diff --git a/.github/workflows/assign_project.yml b/.github/workflows/assign_project.yml deleted file mode 100644 index 87a8ee8b3f70b..0000000000000 --- a/.github/workflows/assign_project.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: Auto Assign Project Local - -on: - issues: - types: [labeled] -env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - -jobs: - assign_one_project: - runs-on: ubuntu-latest - name: Assign to One Project - steps: - - name: Run issues assignment to project SIG Runtime Kanban - uses: srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'component/coprocessor') || - contains(github.event.issue.labels.*.name, 'sig/executor') || - contains(github.event.issue.labels.*.name, 'component/expression') - with: - project: 'https://github.com/pingcap/tidb/projects/38' - column_name: 'Issue Backlog: Need Triage' - - name: Run issues assignment to project SIG Planner Kanban - uses: srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'sig/planner') || - contains(github.event.issue.labels.*.name, 'component/statistics') || - contains(github.event.issue.labels.*.name, 'component/bindinfo') - with: - project: 'https://github.com/pingcap/tidb/projects/39' - column_name: 'Issue Backlog: Need Triage' - - name: Run issues assignment to Feature Request Kanban - uses: srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'type/feature-request') - with: - project: 'https://github.com/pingcap/tidb/projects/41' - column_name: 'Need Triage' - - name: Run issues assignment to Robust test - uses: srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'component/test') - with: - project: 
'https://github.com/pingcap/tidb/projects/32' - column_name: 'TODO/Help Wanted' - - name: Run issues assignment to project UT Coverage - uses: srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'type/UT-coverage') - with: - project: 'https://github.com/pingcap/tidb/projects/44' - column_name: 'To do' - - name: Run issues assignment to project SIG DDL Kanban - uses: srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'sig/infra') || - contains(github.event.issue.labels.*.name, 'component/binlog') || - contains(github.event.issue.labels.*.name, 'component/charset') || - contains(github.event.issue.labels.*.name, 'component/infoschema') || - contains(github.event.issue.labels.*.name, 'component/parser') - with: - project: 'https://github.com/pingcap/tidb/projects/40' - column_name: 'Issue Backlog: Need Triage' diff --git a/.github/workflows/br_compatible_test.yml b/.github/workflows/br_compatible_test.yml new file mode 100644 index 0000000000000..aeccfb0a2eff0 --- /dev/null +++ b/.github/workflows/br_compatible_test.yml @@ -0,0 +1,72 @@ +name: BR / Compatibility Test + +on: + push: + # merged git action + branches: + - master + - 'release-[0-9].[0-9]*' + paths: + - 'br/**' + - '!**.html' + - '!**.md' + - '!CNAME' + - '!LICENSE' + - '!br/docs/**' + - '!br/tests/**' + - '!br/docker/**' + # disable pull request only keep the merge action since it is very costly to run those tests + # pull_request: + # branches: + # - master + # - 'release-[0-9].[0-9]*' + # paths: + # - 'br/**' + # - '!**.html' + # - '!**.md' + # - '!CNAME' + # - '!LICENSE' + # - '!br/docs/**' + # - '!br/tests/**' + # - '!br/docker/**' + +# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency. 
+concurrency: + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + check: + runs-on: ubuntu-latest + timeout-minutes: 25 + steps: + + - uses: actions/checkout@v2 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Generate compatibility test backup data + timeout-minutes: 15 + run: sh br/compatibility/prepare_backup.sh + + - name: Start server + run: | + TAG=nightly PORT_SUFFIX=1 docker-compose -f br/compatibility/backup_cluster.yaml rm -s -v + TAG=nightly PORT_SUFFIX=1 docker-compose -f br/compatibility/backup_cluster.yaml build + TAG=nightly PORT_SUFFIX=1 docker-compose -f br/compatibility/backup_cluster.yaml up --remove-orphans -d + TAG=nightly PORT_SUFFIX=1 docker-compose -f br/compatibility/backup_cluster.yaml exec -T control make build_br + TAG=nightly PORT_SUFFIX=1 docker-compose -f br/compatibility/backup_cluster.yaml exec -T control br/tests/run_compatible.sh run + + - name: Collect component log + if: ${{ failure() }} + run: | + tar czvf ${{ github.workspace }}/logs.tar.gz /tmp/br/docker/backup_logs/* + + - uses: actions/upload-artifact@v2 + if: ${{ failure() }} + with: + name: logs + path: ${{ github.workspace }}/logs.tar.gz diff --git a/.github/workflows/compile_br.yaml b/.github/workflows/compile_br.yaml new file mode 100644 index 0000000000000..acfbc2d27bad5 --- /dev/null +++ b/.github/workflows/compile_br.yaml @@ -0,0 +1,94 @@ +name: BR & Lightning +on: + push: + branches: + - master + - 'release-[0-9].[0-9]*' + paths: + - 'br/**' + - '!**.html' + - '!**.md' + - '!CNAME' + - '!LICENSE' + - '!br/docs/**' + - '!br/tests/**' + - '!br/docker/**' +#change trigger policy + pull_request: + types: + - labeled # <-- + branches: + - master + - 'release-[0-9].[0-9]*' + paths: + - 'br/**' + - '!**.html' + - '!**.md' + - '!CNAME' + - '!LICENSE' + - '!br/docs/**' + - '!br/tests/**' + - '!br/docker/**' +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +# See: 
https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency. +concurrency: + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + compile-windows: + if: github.event_name == 'push' || github.event_name == 'pull_request' && github.event.label.name == 'action/run-br-cross-platform-build' + name: Compile for Windows job + runs-on: windows-latest + steps: + - uses: actions/checkout@v2.1.0 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Run build + run: make build_tools + + compile: + if: github.event_name == 'pull_request' && github.event.label.name == 'action/run-br-cross-platform-build' + name: Compile for ${{ matrix.os }} / ${{ matrix.target}} + + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: macos-latest + target: x86_64-apple-darwin + + - os: ubuntu-latest + target: aarch64-unknown-linux-gnu + + steps: + - uses: actions/checkout@v2.1.0 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Run build + run: make build_tools + + compile-freebsd: + if: github.event_name == 'pull_request' && github.event.label.name == 'action/run-br-cross-platform-build' + name: Compile for FreeBSD job + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.1.0 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Compile for FreeBSD + run: GOOS=freebsd make build_tools diff --git a/.github/workflows/dumpling_integration_test.yml b/.github/workflows/dumpling_integration_test.yml new file mode 100644 index 0000000000000..8390ef961f44d --- /dev/null +++ b/.github/workflows/dumpling_integration_test.yml @@ -0,0 +1,145 @@ +name: Dumpling +on: + push: + branches: + - master + - release-* + paths: + - 'dumpling/**' + - 'br/pkg/storage/**' + - 'br/pkg/utils/**' + - 'br/pkg/summary/**' + - 'store/helper/**' + - 'tablecodec/**' + - 'util/codec/**' + - 'parser/model/**' + pull_request: + 
branches: + - master + - release-* + paths: + - 'dumpling/**' + - 'br/pkg/storage/**' + - 'br/pkg/utils/**' + - 'br/pkg/summary/**' + - 'store/helper/**' + - 'tablecodec/**' + - 'util/codec/**' + - 'parser/model/**' + +# See: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency. +concurrency: + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + integration-test-mysql-5735: + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + fail-fast: true + services: + mysql: + image: mysql:5.7.35 + env: + MYSQL_ALLOW_EMPTY_PASSWORD: yes + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + steps: + - uses: actions/checkout@v2 + - name: Shutdown Ubuntu MySQL (SUDO) + run: sudo service mysql stop # Shutdown the Default MySQL, "sudo" is necessary, please not remove it + - name: Set up Go 1.16 + uses: actions/setup-go@v2 + with: + go-version: 1.16 + - uses: actions/cache@v2 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: Get dependencies + run: go mod download + - name: Download dependencies + run: sh dumpling/install.sh + - name: Integration test + run: make dumpling_integration_test VERBOSE="true" + - name: Set up tmate session + if: ${{ failure() }} + uses: mxschmitt/action-tmate@v3 + + integration-test-mysql-8026: + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + fail-fast: true + services: + mysql: + image: mysql:8.0.26 + env: + MYSQL_ALLOW_EMPTY_PASSWORD: yes + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + steps: + - uses: actions/checkout@v2 + - name: Shutdown Ubuntu MySQL (SUDO) + run: sudo service mysql stop # Shutdown the Default MySQL, "sudo" is necessary, please not remove it + - name: Set up Go 1.16 + uses: actions/setup-go@v2 + with: + 
go-version: 1.16 + - uses: actions/cache@v2 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: Get dependencies + run: go mod download + - name: Download dependencies + run: sh dumpling/install.sh + - name: Integration test + run: make dumpling_integration_test VERBOSE="true" + - name: Set up tmate session + if: ${{ failure() }} + uses: mxschmitt/action-tmate@v3 + + integration-test-mysql-8022: + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + fail-fast: true + services: + mysql: + image: mysql:8.0.22 + env: + MYSQL_ALLOW_EMPTY_PASSWORD: yes + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + steps: + - uses: actions/checkout@v2 + - name: Shutdown Ubuntu MySQL (SUDO) + run: sudo service mysql stop # Shutdown the Default MySQL, "sudo" is necessary, please not remove it + - name: Set up Go 1.16 + uses: actions/setup-go@v2 + with: + go-version: 1.16 + - uses: actions/cache@v2 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: Get dependencies + run: go mod download + - name: Download dependencies + run: sh dumpling/install.sh + - name: Integration test + run: make dumpling_integration_test VERBOSE="true" + - name: Set up tmate session + if: ${{ failure() }} + uses: mxschmitt/action-tmate@v3 diff --git a/.github/workflows/issue_assigned.yml b/.github/workflows/issue_assigned.yml deleted file mode 100644 index 90735cf64b36c..0000000000000 --- a/.github/workflows/issue_assigned.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Auto Assign Project Local - -on: - issues: - types: [assigned] -env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - -jobs: - assign_to_project_column: - runs-on: ubuntu-latest - name: Assign to Project Column - steps: - - name: Run issues assignment to Robust test - uses: 
srggrs/assign-one-project-github-action@1.2.0 - if: | - contains(github.event.issue.labels.*.name, 'component/test') - with: - project: 'https://github.com/pingcap/tidb/projects/32' - column_name: 'Task Assigned' diff --git a/.github/workflows/license-checker.yml b/.github/workflows/license-checker.yml new file mode 100644 index 0000000000000..e156c1b2b4c5a --- /dev/null +++ b/.github/workflows/license-checker.yml @@ -0,0 +1,23 @@ +name: License checker + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + check-license: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Check License Header + uses: apache/skywalking-eyes@main + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + log: info + config: .github/licenserc.yml diff --git a/.gitignore b/.gitignore index f1d8731e36d6b..ec864762068de 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ coverage.out *.iml *.swp *.log +*.test.bin tags profile.coverprofile explain_test @@ -19,3 +20,12 @@ vendor /_tools/ .DS_Store .vscode +bench_daily.json +coverage.txt +var +fix.sql +export-20*/ +*-coverage.xml +*-junit-report.xml +# Files generated when testing +out diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000000000..ce37ff0de15b3 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,41 @@ +run: + timeout: 10m +linters: + disable-all: true + enable: + - misspell + - ineffassign + - typecheck + - varcheck + - unused + - structcheck + - deadcode + - gosimple + - goimports + - errcheck + - staticcheck + - stylecheck + - gosec + - asciicheck + - bodyclose + - exportloopref + - rowserrcheck + - unconvert + - makezero + - durationcheck + - prealloc + - predeclared + +linters-settings: + staticcheck: + checks: ["S1002","S1004","S1007","S1009","S1010","S1012","S1019","S1020","S1021","S1024","S1030","SA2*","SA3*","SA4009","SA5*","SA6000","SA6001","SA6005", "-SA2002"] + stylecheck: + checks: ["-ST1003"] +issues: + exclude-rules: + - path: 
_test\.go + linters: + - errcheck + - gosec + - rowserrcheck + - makezero diff --git a/Dockerfile b/Dockerfile index a739919a752b3..154727f659357 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,6 +8,7 @@ # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. diff --git a/Makefile b/Makefile index 50ae4507a3ed2..12282d6f0749a 100644 --- a/Makefile +++ b/Makefile @@ -8,12 +8,13 @@ # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. include Makefile.common -.PHONY: all clean test gotest server dev benchkv benchraw check checklist parser tidy ddltest +.PHONY: all clean test gotest server dev benchkv benchraw check checklist parser tidy ddltest build_br build_lightning build_lightning-ctl build_dumpling ut default: server buildsucc @@ -27,51 +28,27 @@ all: dev server benchkv parser: @echo "remove this command later, when our CI script doesn't call it" -dev: checklist check test +dev: checklist check explaintest gogenerate br_unit_test test_part_parser_dev + @>&2 echo "Great, all tests passed." # Install the check tools. 
-check-setup:tools/bin/revive tools/bin/goword tools/bin/gometalinter tools/bin/gosec - -check: fmt errcheck unconvert lint tidy testSuite check-static vet staticcheck errdoc +check-setup:tools/bin/revive tools/bin/goword -# These need to be fixed before they can be ran regularly -check-fail: goword check-slow +check: fmt check-parallel unconvert lint tidy testSuite check-static vet errdoc fmt: @echo "gofmt (simplify)" @gofmt -s -l -w $(FILES) 2>&1 | $(FAIL_ON_STDOUT) - @cd cmd/importcheck && $(GO) run . ../.. goword:tools/bin/goword tools/bin/goword $(FILES) 2>&1 | $(FAIL_ON_STDOUT) -gosec:tools/bin/gosec - tools/bin/gosec $$($(PACKAGE_DIRECTORIES)) - check-static: tools/bin/golangci-lint - tools/bin/golangci-lint run -v --disable-all --deadline=3m \ - --enable=misspell \ - --enable=ineffassign \ - --enable=typecheck \ - --enable=varcheck \ - --enable=unused \ - --enable=structcheck \ - --enable=deadcode \ - --enable=gosimple \ - $$($(PACKAGE_DIRECTORIES)) - -check-slow:tools/bin/gometalinter tools/bin/gosec - tools/bin/gometalinter --disable-all \ - --enable errcheck \ - $$($(PACKAGE_DIRECTORIES)) - -errcheck:tools/bin/errcheck - @echo "errcheck" - @GO111MODULE=on tools/bin/errcheck -exclude ./tools/check/errcheck_excludes.txt -ignoretests -blank $(PACKAGES) + GO111MODULE=on CGO_ENABLED=0 tools/bin/golangci-lint run -v $$($(PACKAGE_DIRECTORIES)) --config .golangci.yml unconvert:tools/bin/unconvert - @echo "unconvert check" - @GO111MODULE=on tools/bin/unconvert ./... + @echo "unconvert check(skip check the generated or copied code in lightning)" + @GO111MODULE=on tools/bin/unconvert $(UNCONVERT_PACKAGES) gogenerate: @echo "go generate ./..." 
@@ -83,15 +60,11 @@ errdoc:tools/bin/errdoc-gen lint:tools/bin/revive @echo "linting" - @tools/bin/revive -formatter friendly -config tools/check/revive.toml $(FILES) + @tools/bin/revive -formatter friendly -config tools/check/revive.toml $(FILES_TIDB_TESTS) vet: @echo "vet" - $(GO) vet -all $(PACKAGES) 2>&1 | $(FAIL_ON_STDOUT) - -staticcheck: - $(GO) get honnef.co/go/tools/cmd/staticcheck - $(STATICCHECK) ./... + $(GO) vet -all $(PACKAGES_TIDB_TESTS) 2>&1 | $(FAIL_ON_STDOUT) tidy: @echo "go mod tidy" @@ -101,6 +74,13 @@ testSuite: @echo "testSuite" ./tools/check/check_testSuite.sh +check-parallel: +# Make sure no tests are run in parallel to prevent possible unstable tests. +# See https://github.com/pingcap/tidb/pull/30692. + @! find . -name "*_test.go" -not -path "./vendor/*" -print0 | \ + xargs -0 grep -F -n "t.Parallel()" || \ + ! echo "Error: all the go tests should be run in serial." + clean: failpoint-disable $(GO) clean -i ./... @@ -110,7 +90,24 @@ test: test_part_1 test_part_2 test_part_1: checklist explaintest -test_part_2: gotest gogenerate +test_part_2: test_part_parser gotest gogenerate br_unit_test dumpling_unit_test + +test_part_parser: parser_yacc test_part_parser_dev + +test_part_parser_dev: parser_fmt parser_unit_test + +parser_yacc: + @cd parser && mv parser.go parser.go.committed && make parser && diff -u parser.go.committed parser.go && rm parser.go.committed + +parser_fmt: + @cd parser && make fmt + +parser_unit_test: + @cd parser && make test + +test_part_br: br_unit_test br_integration_test + +test_part_dumpling: dumpling_unit_test dumpling_integration_test explaintest: server_check @cd cmd/explaintest && ./run-tests.sh -s ../../bin/tidb-server @@ -118,34 +115,38 @@ explaintest: server_check ddltest: @cd cmd/ddltest && $(GO) test -o ../../bin/ddltest -c -upload-coverage: SHELL:=/bin/bash -upload-coverage: -ifeq ("$(TRAVIS_COVERAGE)", "1") - mv overalls.coverprofile coverage.txt - bash <(curl -s https://codecov.io/bash) -endif 
+CLEAN_UT_BINARY := find . -name '*.test.bin'| xargs rm + +ut: tools/bin/ut tools/bin/xprog failpoint-enable + tools/bin/ut $(X) || { $(FAILPOINT_DISABLE); exit 1; } + @$(FAILPOINT_DISABLE) + @$(CLEAN_UT_BINARY) gotest: failpoint-enable -ifeq ("$(TRAVIS_COVERAGE)", "1") - @echo "Running in TRAVIS_COVERAGE mode." - $(GO) get github.com/go-playground/overalls - @export log_level=info; \ - $(OVERALLS) -project=github.com/pingcap/tidb \ - -covermode=count \ - -ignore='.git,vendor,cmd,docs,tests,LICENSES' \ - -concurrency=4 \ - -- -coverpkg=./... \ - || { $(FAILPOINT_DISABLE); exit 1; } -else @echo "Running in native mode." @export log_level=info; export TZ='Asia/Shanghai'; \ - $(GOTEST) -ldflags '$(TEST_LDFLAGS)' $(EXTRA_TEST_ARGS) -cover $(PACKAGES) -check.p true -check.timeout 4s || { $(FAILPOINT_DISABLE); exit 1; } -endif + $(GOTEST) -ldflags '$(TEST_LDFLAGS)' $(EXTRA_TEST_ARGS) -timeout 20m -cover $(PACKAGES_TIDB_TESTS) -coverprofile=coverage.txt -check.p true > gotest.log || { $(FAILPOINT_DISABLE); cat 'gotest.log'; exit 1; } @$(FAILPOINT_DISABLE) +gotest_in_verify_ci: tools/bin/xprog tools/bin/ut failpoint-enable + @echo "Running gotest_in_verify_ci" + @mkdir -p $(TEST_COVERAGE_DIR) + @export TZ='Asia/Shanghai'; \ + tools/bin/ut --junitfile "$(TEST_COVERAGE_DIR)/tidb-junit-report.xml" --coverprofile "$(TEST_COVERAGE_DIR)/tidb_cov.unit_test.out" --except unstable.txt || { $(FAILPOINT_DISABLE); exit 1; } + @$(FAILPOINT_DISABLE) + @$(CLEAN_UT_BINARY) + +gotest_unstable_in_verify_ci: tools/bin/xprog tools/bin/ut failpoint-enable + @echo "Running gotest_in_verify_ci" + @mkdir -p $(TEST_COVERAGE_DIR) + @export TZ='Asia/Shanghai'; \ + tools/bin/ut --junitfile "$(TEST_COVERAGE_DIR)/tidb-junit-report.xml" --coverprofile "$(TEST_COVERAGE_DIR)/tidb_cov.unit_test.out" --only unstable.txt || { $(FAILPOINT_DISABLE); exit 1; } + @$(FAILPOINT_DISABLE) + @$(CLEAN_UT_BINARY) + race: failpoint-enable @export log_level=debug; \ - $(GOTEST) -timeout 20m -race $(PACKAGES) || { 
$(FAILPOINT_DISABLE); exit 1; } + $(GOTEST) -timeout 25m -race $(PACKAGES) || { $(FAILPOINT_DISABLE); exit 1; } @$(FAILPOINT_DISABLE) leak: failpoint-enable @@ -160,6 +161,13 @@ else CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o '$(TARGET)' tidb-server/main.go endif +server_debug: +ifeq ($(TARGET), "") + CGO_ENABLED=1 $(GOBUILD) -gcflags="all=-N -l" $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o bin/tidb-server-debug tidb-server/main.go +else + CGO_ENABLED=1 $(GOBUILD) -gcflags="all=-N -l" $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o '$(TARGET)' tidb-server/main.go +endif + server_check: ifeq ($(TARGET), "") $(GOBUILD) $(RACE_FLAG) -ldflags '$(CHECK_LDFLAGS)' -o bin/tidb-server tidb-server/main.go @@ -204,6 +212,14 @@ failpoint-disable: tools/bin/failpoint-ctl # Restoring gofail failpoints... @$(FAILPOINT_DISABLE) +tools/bin/ut: tools/check/ut.go + cd tools/check; \ + $(GO) build -o ../bin/ut ut.go + +tools/bin/xprog: tools/check/xprog.go + cd tools/check; \ + $(GO) build -o ../bin/xprog xprog.go + tools/bin/megacheck: tools/check/go.mod cd tools/check; \ $(GO) build -o ../bin/megacheck honnef.co/go/tools/cmd/megacheck @@ -216,18 +232,6 @@ tools/bin/goword: tools/check/go.mod cd tools/check; \ $(GO) build -o ../bin/goword github.com/chzchzchz/goword -tools/bin/gometalinter: tools/check/go.mod - cd tools/check; \ - $(GO) build -o ../bin/gometalinter gopkg.in/alecthomas/gometalinter.v3 - -tools/bin/gosec: tools/check/go.mod - cd tools/check; \ - $(GO) build -o ../bin/gosec github.com/securego/gosec/cmd/gosec - -tools/bin/errcheck: tools/check/go.mod - cd tools/check; \ - $(GO) build -o ../bin/errcheck github.com/kisielk/errcheck - tools/bin/unconvert: tools/check/go.mod cd tools/check; \ $(GO) build -o ../bin/unconvert github.com/mdempsky/unconvert @@ -243,6 +247,10 @@ tools/bin/errdoc-gen: tools/check/go.mod tools/bin/golangci-lint: curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s 
-- -b ./tools/bin v1.41.1 +tools/bin/vfsgendev: tools/check/go.mod + cd tools/check; \ + $(GO) build -o ../bin/vfsgendev github.com/shurcooL/vfsgen/cmd/vfsgendev + # Usage: # # $ make vectorized-bench VB_FILE=Time VB_FUNC=builtinCurrentDateSig @@ -267,5 +275,156 @@ endif # Usage: # make bench-daily TO=/path/to/file.json bench-daily: - cd ./session && \ - go test -run TestBenchDaily --date `git log -n1 --date=unix --pretty=format:%cd` --commit `git log -n1 --pretty=format:%h` --outfile $(TO) + go test github.com/pingcap/tidb/session -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/executor -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/tablecodec -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/expression -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/util/rowcodec -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/util/codec -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/distsql -run TestBenchDaily -bench Ignore --outfile bench_daily.json + go test github.com/pingcap/tidb/util/benchdaily -run TestBenchDaily -bench Ignore \ + -date `git log -n1 --date=unix --pretty=format:%cd` \ + -commit `git log -n1 --pretty=format:%h` \ + -outfile $(TO) + +build_tools: build_br build_lightning build_lightning-ctl + +br_web: + @cd br/web && npm install && npm run build + +build_br: + CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o $(BR_BIN) br/cmd/br/*.go + +build_lightning_for_web: + CGO_ENABLED=1 $(GOBUILD) -tags dev $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o $(LIGHTNING_BIN) br/cmd/tidb-lightning/main.go + +build_lightning: + CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o $(LIGHTNING_BIN) br/cmd/tidb-lightning/main.go + 
+build_lightning-ctl: + CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(LDFLAGS) $(CHECK_FLAG)' -o $(LIGHTNING_CTL_BIN) br/cmd/tidb-lightning-ctl/main.go + +build_for_br_integration_test: + @make failpoint-enable + ($(GOTEST) -c -cover -covermode=count \ + -coverpkg=github.com/pingcap/tidb/br/... \ + -o $(BR_BIN).test \ + github.com/pingcap/tidb/br/cmd/br && \ + $(GOTEST) -c -cover -covermode=count \ + -coverpkg=github.com/pingcap/tidb/br/... \ + -o $(LIGHTNING_BIN).test \ + github.com/pingcap/tidb/br/cmd/tidb-lightning && \ + $(GOTEST) -c -cover -covermode=count \ + -coverpkg=github.com/pingcap/tidb/br/... \ + -o $(LIGHTNING_CTL_BIN).test \ + github.com/pingcap/tidb/br/cmd/tidb-lightning-ctl && \ + $(GOBUILD) $(RACE_FLAG) -o bin/locker br/tests/br_key_locked/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/gc br/tests/br_z_gc_safepoint/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/oauth br/tests/br_gcs/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/rawkv br/tests/br_rawkv/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/parquet_gen br/tests/lightning_checkpoint_parquet/*.go \ + ) || (make failpoint-disable && exit 1) + @make failpoint-disable + +br_unit_test: export ARGS=$$($(BR_PACKAGES)) +br_unit_test: + @make failpoint-enable + @export TZ='Asia/Shanghai'; + $(GOTEST) $(RACE_FLAG) -ldflags '$(LDFLAGS)' -tags leak $(ARGS) -coverprofile=coverage.txt || ( make failpoint-disable && exit 1 ) + @make failpoint-disable +br_unit_test_in_verify_ci: export ARGS=$$($(BR_PACKAGES)) +br_unit_test_in_verify_ci: tools/bin/gotestsum + @make failpoint-enable + @export TZ='Asia/Shanghai'; + @mkdir -p $(TEST_COVERAGE_DIR) + CGO_ENABLED=1 tools/bin/gotestsum --junitfile "$(TEST_COVERAGE_DIR)/br-junit-report.xml" -- $(RACE_FLAG) -ldflags '$(LDFLAGS)' \ + -tags leak $(ARGS) -coverprofile="$(TEST_COVERAGE_DIR)/br_cov.unit_test.out" || ( make failpoint-disable && exit 1 ) + @make failpoint-disable + +br_integration_test: br_bins build_br build_for_br_integration_test + @cd br && tests/run.sh + 
+br_compatibility_test_prepare: + @cd br && tests/run_compatible.sh prepare + +br_compatibility_test: + @cd br && tests/run_compatible.sh run + +# There is no FreeBSD environment for GitHub actions. So cross-compile on Linux +# but that doesn't work with CGO_ENABLED=1, so disable cgo. The reason to have +# cgo enabled on regular builds is performance. +ifeq ("$(GOOS)", "freebsd") + GOBUILD = CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '$(LDFLAGS)' +endif + +br_coverage: + tools/bin/gocovmerge "$(TEST_DIR)"/cov.* | grep -vE ".*.pb.go|.*__failpoint_binding__.go" > "$(TEST_DIR)/all_cov.out" +ifeq ("$(JenkinsCI)", "1") + tools/bin/goveralls -coverprofile=$(TEST_DIR)/all_cov.out -service=jenkins-ci -repotoken $(COVERALLS_TOKEN) +else + go tool cover -html "$(TEST_DIR)/all_cov.out" -o "$(TEST_DIR)/all_cov.html" + grep -F ' $@ + @rm tmp_parser.go + +data_parsers: tools/bin/vfsgendev br/pkg/lightning/mydump/parser_generated.go br_web + PATH="$(GOPATH)/bin":"$(PATH)":"$(TOOLS)" protoc -I. -I"$(GOPATH)/src" br/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto --gogofaster_out=. 
+ tools/bin/vfsgendev -source='"github.com/pingcap/tidb/br/pkg/lightning/web".Res' && mv res_vfsdata.go br/pkg/lightning/web/ + +build_dumpling: + $(DUMPLING_GOBUILD) $(RACE_FLAG) -tags codes -o $(DUMPLING_BIN) dumpling/cmd/dumpling/main.go + +dumpling_unit_test: export DUMPLING_ARGS=$$($(DUMPLING_PACKAGES)) +dumpling_unit_test: failpoint-enable + $(DUMPLING_GOTEST) $(RACE_FLAG) -coverprofile=coverage.txt -covermode=atomic -tags leak $(DUMPLING_ARGS) || ( make failpoint-disable && exit 1 ) + @make failpoint-disable +dumpling_unit_test_in_verify_ci: export DUMPLING_ARGS=$$($(DUMPLING_PACKAGES)) +dumpling_unit_test_in_verify_ci: failpoint-enable tools/bin/gotestsum + @mkdir -p $(TEST_COVERAGE_DIR) + CGO_ENABLED=1 tools/bin/gotestsum --junitfile "$(TEST_COVERAGE_DIR)/dumpling-junit-report.xml" -- -tags leak $(DUMPLING_ARGS) \ + $(RACE_FLAG) -coverprofile="$(TEST_COVERAGE_DIR)/dumpling_cov.unit_test.out" || ( make failpoint-disable && exit 1 ) + @make failpoint-disable + +dumpling_integration_test: dumpling_bins failpoint-enable build_dumpling + @make failpoint-disable + ./dumpling/tests/run.sh $(CASE) + +dumpling_tools: + @echo "install dumpling tools..." 
+ @cd dumpling/tools && make + +dumpling_tidy: + @echo "go mod tidy" + GO111MODULE=on go mod tidy + git diff --exit-code go.mod go.sum dumpling/tools/go.mod dumpling/tools/go.sum + +dumpling_bins: + @which bin/tidb-server + @which bin/minio + @which bin/tidb-lightning + @which bin/sync_diff_inspector + +tools/bin/gotestsum: tools/check/go.mod + cd tools/check && $(GO) build -o ../bin/gotestsum gotest.tools/gotestsum + +generate_grafana_scripts: + @cd metrics/grafana && mv tidb_summary.json tidb_summary.json.committed && ./generate_json.sh && diff -u tidb_summary.json.committed tidb_summary.json && rm tidb_summary.json.committed diff --git a/Makefile.common b/Makefile.common index 8885dc6c3a979..8ea85d6a24694 100644 --- a/Makefile.common +++ b/Makefile.common @@ -8,6 +8,7 @@ # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. @@ -43,15 +44,23 @@ endif ARCH := "`uname -s`" LINUX := "Linux" MAC := "Darwin" -PACKAGE_LIST := go list ./...| grep -vE "cmd|github.com\/pingcap\/tidb\/tests" + +PACKAGE_LIST := go list ./... +PACKAGE_LIST_TIDB_TESTS := go list ./... 
| grep -vE "github.com\/pingcap\/tidb\/br|github.com\/pingcap\/tidb\/cmd|github.com\/pingcap\/tidb\/dumpling" PACKAGES ?= $$($(PACKAGE_LIST)) +PACKAGES_TIDB_TESTS ?= $$($(PACKAGE_LIST_TIDB_TESTS)) PACKAGE_DIRECTORIES := $(PACKAGE_LIST) | sed 's|github.com/pingcap/$(PROJECT)/||' +PACKAGE_DIRECTORIES_TIDB_TESTS := $(PACKAGE_LIST_TIDB_TESTS) | sed 's|github.com/pingcap/$(PROJECT)/||' FILES := $$(find $$($(PACKAGE_DIRECTORIES)) -name "*.go") +FILES_TIDB_TESTS := $$(find $$($(PACKAGE_DIRECTORIES_TIDB_TESTS)) -name "*.go") + +UNCONVERT_PACKAGES_LIST := go list ./...| grep -vE "lightning\/checkpoints|lightning\/manual|lightning\/common" +UNCONVERT_PACKAGES := $$($(UNCONVERT_PACKAGES_LIST)) -FAILPOINT_ENABLE := $$(find $$PWD/ -type d | grep -vE "(\.git|tools)" | xargs tools/bin/failpoint-ctl enable) -FAILPOINT_DISABLE := $$(find $$PWD/ -type d | grep -vE "(\.git|tools)" | xargs tools/bin/failpoint-ctl disable) +FAILPOINT_ENABLE := find $$PWD/ -type d | grep -vE "(\.git|tools)" | xargs tools/bin/failpoint-ctl enable +FAILPOINT_DISABLE := find $$PWD/ -type d | grep -vE "(\.git|tools)" | xargs tools/bin/failpoint-ctl disable -LDFLAGS += -X "github.com/pingcap/parser/mysql.TiDBReleaseVersion=$(shell git describe --tags --dirty --always)" +LDFLAGS += -X "github.com/pingcap/tidb/parser/mysql.TiDBReleaseVersion=$(shell git describe --tags --dirty --always)" LDFLAGS += -X "github.com/pingcap/tidb/util/versioninfo.TiDBBuildTS=$(shell date -u '+%Y-%m-%d %H:%M:%S')" LDFLAGS += -X "github.com/pingcap/tidb/util/versioninfo.TiDBGitHash=$(shell git rev-parse HEAD)" LDFLAGS += -X "github.com/pingcap/tidb/util/versioninfo.TiDBGitBranch=$(shell git rev-parse --abbrev-ref HEAD)" @@ -78,3 +87,29 @@ CHECK_FLAG = ifeq ("$(WITH_CHECK)", "1") CHECK_FLAG = $(TEST_LDFLAGS) endif + +BR_PKG := github.com/pingcap/tidb/br +BR_PACKAGES := go list ./...| grep "github.com\/pingcap\/tidb\/br" +BR_PACKAGE_DIRECTORIES := $(BR_PACKAGES) | sed 's|github.com/pingcap/$(PROJECT)/||' +LIGHTNING_BIN := 
bin/tidb-lightning +LIGHTNING_CTL_BIN := bin/tidb-lightning-ctl +BR_BIN := bin/br +TEST_DIR := /tmp/backup_restore_test + + +DUMPLING_PKG := github.com/pingcap/tidb/dumpling +DUMPLING_PACKAGES := go list ./... | grep 'github.com\/pingcap\/tidb\/dumpling' +DUMPLING_PACKAGE_DIRECTORIES := $(DUMPLING_PACKAGES) | sed 's|github.com/pingcap/$(PROJECT)/||' +DUMPLING_BIN := bin/dumpling +DUMPLING_CHECKER := awk '{ print } END { if (NR > 0) { exit 1 } }' + +DUMPLING_LDFLAGS += -X "github.com/pingcap/tidb/dumpling/cli.ReleaseVersion=$(shell git describe --tags --dirty='-dev')" +DUMPLING_LDFLAGS += -X "github.com/pingcap/tidb/dumpling/cli.BuildTimestamp=$(shell date -u '+%Y-%m-%d %I:%M:%S')" +DUMPLING_LDFLAGS += -X "github.com/pingcap/tidb/dumpling/cli.GitHash=$(shell git rev-parse HEAD)" +DUMPLING_LDFLAGS += -X "github.com/pingcap/tidb/dumpling/cli.GitBranch=$(shell git rev-parse --abbrev-ref HEAD)" +DUMPLING_LDFLAGS += -X "github.com/pingcap/tidb/dumpling/cli.GoVersion=$(shell go version)" + +DUMPLING_GOBUILD := CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '$(DUMPLING_LDFLAGS)' +DUMPLING_GOTEST := CGO_ENABLED=1 GO111MODULE=on go test -ldflags '$(DUMPLING_LDFLAGS)' + +TEST_COVERAGE_DIR := "test_coverage" diff --git a/README.md b/README.md index b4fcc35237280..4abbae9bed908 100644 --- a/README.md +++ b/README.md @@ -22,9 +22,9 @@ TiDB ("Ti" stands for Titanium) is an open-source NewSQL database that supports TiDB acts like it is a MySQL 5.7 server to your applications. You can continue to use all of the existing MySQL client libraries, and in many cases, you will not need to change a single line of code in your application. Because TiDB is built from scratch, not a MySQL fork, please check out the list of [known compatibility differences](https://docs.pingcap.com/tidb/stable/mysql-compatibility). 
-- __Distributed Transactions with Strong Consistency__ +- __Distributed Transactions__ - TiDB internally shards table into small range-based chunks that we refer to as "Regions". Each Region defaults to approximately 100 MiB in size, and TiDB uses a Two-phase commit internally to ensure that Regions are maintained in a transactionally consistent way. + TiDB internally shards table into small range-based chunks that we refer to as "Regions". Each Region defaults to approximately 100 MiB in size, and TiDB uses an [optimized](https://pingcap.com/blog/async-commit-the-accelerator-for-transaction-commit-in-tidb-5.0) Two-phase commit to ensure that Regions are maintained in a transactionally consistent way. - __Cloud Native__ @@ -46,9 +46,9 @@ For more details and latest updates, see [TiDB docs](https://docs.pingcap.com/ti You can join these groups and chats to discuss and ask TiDB related questions: -- [Contributors Mailing list](https://lists.tidb.io/g/contributors) +- [TiDB Internals Forum](https://internals.tidb.io/) - [Slack Channel](https://slack.tidb.io/invite?team=tidb-community&channel=everyone&ref=pingcap-tidb) -- [Chinese Forum](https://asktug.com) +- [TiDB User Group Forum (Chinese)](https://asktug.com) In addition, you may enjoy following: @@ -60,37 +60,19 @@ For support, please contact [PingCAP](http://bit.ly/contact_us_via_github). ## Quick start -### To start using TiDB - -See [Quick Start Guide](https://pingcap.com/docs/stable/quick-start-with-tidb/). - -### To start developing TiDB - -If you want to build TiDB right away, there are two options: +### To start using TiDB Cloud -**You have a working [Go environment](https://golang.org/doc/install).** +We provide TiDB Cloud - a fully-managed Database as a Service for you. 
-``` -mkdir -p $GOPATH/src/github.com/pingcap -cd $GOPATH/src/github.com/pingcap -git clone https://github.com/pingcap/tidb.git -cd tidb -make -cd bin && ./tidb-server -``` +See [TiDB Cloud Quick Start](https://docs.pingcap.com/tidbcloud/public-preview/tidb-cloud-quickstart). -**You have a working [Docker environment](https://docs.docker.com/engine/).** +### To start using TiDB -``` -docker pull pingcap/tidb:latest -docker run --name tidb-server -d -p 4000:4000 pingcap/tidb:latest -``` +See [Quick Start Guide](https://docs.pingcap.com/tidb/stable/quick-start-with-tidb). -Now you can use official mysql client to connect to TiDB. +### To start developing TiDB -``` -mysql -h 127.0.0.1 -P 4000 -u root -D test --prompt="tidb> " -``` +See [Get Started](https://pingcap.github.io/tidb-dev-guide/get-started/introduction.html) chapter of [TiDB Dev Guide](https://pingcap.github.io/tidb-dev-guide/index.html). ## Contributing @@ -98,7 +80,7 @@ The [community repository](https://github.com/pingcap/community) hosts all infor [contribution-map](https://github.com/pingcap/tidb-map/blob/master/maps/contribution-map.md#tidb-is-an-open-source-distributed-htap-database-compatible-with-the-mysql-protocol) -Contributions are welcomed and greatly appreciated. See [Contribution Guide](https://github.com/pingcap/community/blob/master/contributors/README.md) for details on submitting patches and the contribution workflow. For more contributing information, click on the contributor icon above. +Contributions are welcomed and greatly appreciated. See [Contribution to TiDB](https://pingcap.github.io/tidb-dev-guide/contribute-to-tidb/introduction.html) for details on typical contribution workflows. For more contributing information, click on the contributor icon above. 
## Adopters @@ -109,10 +91,6 @@ View the current list of in-production TiDB adopters [here](https://docs.pingcap - [English](https://pingcap.com/case-studies) - [简体中文](https://pingcap.com/cases-cn/) -## Roadmap - -Read the [Roadmap](https://pingcap.com/docs/ROADMAP). - ## Architecture ![architecture](./docs/architecture.png) diff --git a/bindinfo/bind_cache.go b/bindinfo/bind_cache.go new file mode 100644 index 0000000000000..1742a39972410 --- /dev/null +++ b/bindinfo/bind_cache.go @@ -0,0 +1,213 @@ +// Copyright 2022 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bindinfo + +import ( + "sync" + + "github.com/cznic/mathutil" + "github.com/pingcap/tidb/sessionctx/variable" + "github.com/pingcap/tidb/util/hack" + "github.com/pingcap/tidb/util/kvcache" + "github.com/pingcap/tidb/util/memory" +) + +// bindCache uses the LRU cache to store the bindRecord. +// The key of the LRU cache is original sql, the value is a slice of BindRecord. +// Note: The bindCache should be accessed with lock. +type bindCache struct { + lock sync.Mutex + cache *kvcache.SimpleLRUCache + memCapacity int64 + memTracker *memory.Tracker // track memory usage. 
+} + +type bindCacheKey string + +func (key bindCacheKey) Hash() []byte { + return hack.Slice(string(key)) +} + +func calcBindCacheKVMem(key bindCacheKey, value []*BindRecord) int64 { + var valMem int64 + for _, bindRecord := range value { + valMem += int64(bindRecord.size()) + } + return int64(len(key.Hash())) + valMem +} + +func newBindCache() *bindCache { + // since bindCache controls the memory usage by itself, set the capacity of + // the underlying LRUCache to max to close its memory control + cache := kvcache.NewSimpleLRUCache(mathutil.MaxUint, 0, 0) + c := bindCache{ + cache: cache, + memCapacity: variable.MemQuotaBindCache.Load(), + memTracker: memory.NewTracker(memory.LabelForBindCache, -1), + } + return &c +} + +// get gets a cache item according to cache key. It's not thread-safe. +// Note: Only other functions of the bindCache file can use this function. +// Don't use this function directly in other files in bindinfo package. +// The return value is not read-only, but it is only can be used in other functions which are also in the bind_cache.go. +func (c *bindCache) get(key bindCacheKey) []*BindRecord { + value, hit := c.cache.Get(key) + if !hit { + return nil + } + typedValue := value.([]*BindRecord) + return typedValue +} + +// set inserts an item to the cache. It's not thread-safe. +// Only other functions of the bindCache can use this function. +func (c *bindCache) set(key bindCacheKey, value []*BindRecord) bool { + mem := calcBindCacheKVMem(key, value) + if mem > c.memCapacity { // ignore this kv pair if its size is too large + return false + } + bindRecords := c.get(key) + if bindRecords != nil { + // Remove the origin key-value pair. 
+ mem -= calcBindCacheKVMem(key, bindRecords) + } + for mem+c.memTracker.BytesConsumed() > c.memCapacity { + evictedKey, evictedValue, evicted := c.cache.RemoveOldest() + if !evicted { + return false + } + c.memTracker.Consume(-calcBindCacheKVMem(evictedKey.(bindCacheKey), evictedValue.([]*BindRecord))) + } + c.memTracker.Consume(mem) + c.cache.Put(key, value) + return true +} + +// delete remove an item from the cache. It's not thread-safe. +// Only other functions of the bindCache can use this function. +func (c *bindCache) delete(key bindCacheKey) bool { + bindRecords := c.get(key) + if bindRecords != nil { + mem := calcBindCacheKVMem(key, bindRecords) + c.cache.Delete(key) + c.memTracker.Consume(-mem) + } + return true +} + +// GetBindRecord gets the BindRecord from the cache. +// The return value is not read-only, but it shouldn't be changed in the caller functions. +// The function is thread-safe. +func (c *bindCache) GetBindRecord(hash, normdOrigSQL, db string) *BindRecord { + c.lock.Lock() + defer c.lock.Unlock() + bindRecords := c.get(bindCacheKey(hash)) + for _, bindRecord := range bindRecords { + if bindRecord.OriginalSQL == normdOrigSQL { + return bindRecord + } + } + return nil +} + +// GetAllBindRecords return all the bindRecords from the bindCache. +// The return value is not read-only, but it shouldn't be changed in the caller functions. +// The function is thread-safe. +func (c *bindCache) GetAllBindRecords() []*BindRecord { + c.lock.Lock() + defer c.lock.Unlock() + values := c.cache.Values() + var bindRecords []*BindRecord + for _, vals := range values { + bindRecords = append(bindRecords, vals.([]*BindRecord)...) + } + return bindRecords +} + +// SetBindRecord sets the BindRecord to the cache. +// The function is thread-safe. 
+func (c *bindCache) SetBindRecord(hash string, meta *BindRecord) { + c.lock.Lock() + defer c.lock.Unlock() + cacheKey := bindCacheKey(hash) + metas := c.get(cacheKey) + for i := range metas { + if metas[i].OriginalSQL == meta.OriginalSQL { + metas[i] = meta + } + } + c.set(cacheKey, []*BindRecord{meta}) +} + +// RemoveBindRecord removes the BindRecord which has same originSQL with specified BindRecord. +// The function is thread-safe. +func (c *bindCache) RemoveBindRecord(hash string, meta *BindRecord) { + c.lock.Lock() + defer c.lock.Unlock() + metas := c.get(bindCacheKey(hash)) + if metas == nil { + return + } + + for i := len(metas) - 1; i >= 0; i-- { + if metas[i].isSame(meta) { + metas[i] = metas[i].remove(meta) + if len(metas[i].Bindings) == 0 { + metas = append(metas[:i], metas[i+1:]...) + } + if len(metas) == 0 { + c.delete(bindCacheKey(hash)) + return + } + } + } + c.set(bindCacheKey(hash), metas) +} + +// SetMemCapacity sets the memory capacity for the cache. +// The function is thread-safe. +func (c *bindCache) SetMemCapacity(capacity int64) { + c.lock.Lock() + defer c.lock.Unlock() + // Only change the capacity size without affecting the cached bindRecord + c.memCapacity = capacity +} + +// GetMemCapacity get the memory capacity for the cache. +// The function is thread-safe. +func (c *bindCache) GetMemCapacity() int64 { + c.lock.Lock() + defer c.lock.Unlock() + return c.memCapacity +} + +// Copy copies a new bindCache from the origin cache. +// The function is thread-safe. 
+func (c *bindCache) Copy() *bindCache { + c.lock.Lock() + defer c.lock.Unlock() + newCache := newBindCache() + keys := c.cache.Keys() + for _, key := range keys { + cacheKey := key.(bindCacheKey) + v := c.get(cacheKey) + bindRecords := make([]*BindRecord, len(v)) + copy(bindRecords, v) + newCache.set(cacheKey, bindRecords) + } + return newCache +} diff --git a/bindinfo/bind_cache_test.go b/bindinfo/bind_cache_test.go new file mode 100644 index 0000000000000..be5b33dc07321 --- /dev/null +++ b/bindinfo/bind_cache_test.go @@ -0,0 +1,63 @@ +// Copyright 2022 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bindinfo + +import ( + "strconv" + "strings" + "testing" + + "github.com/pingcap/tidb/sessionctx/variable" + "github.com/pingcap/tidb/util/hack" + "github.com/stretchr/testify/require" +) + +func TestBindCache(t *testing.T) { + variable.MemQuotaBindCache.Store(100) + bindCache := newBindCache() + + value := make([][]*BindRecord, 3) + key := make([]bindCacheKey, 3) + for i := 0; i < 3; i++ { + cacheKey := strings.Repeat(strconv.Itoa(i), 50) + key[i] = bindCacheKey(hack.Slice(cacheKey)) + record := &BindRecord{OriginalSQL: cacheKey, Db: ""} + value[i] = []*BindRecord{record} + + require.Equal(t, int64(100), calcBindCacheKVMem(key[i], value[i])) + } + + ok := bindCache.set(key[0], value[0]) + require.True(t, ok) + result := bindCache.get(key[0]) + require.NotNil(t, result) + + ok = bindCache.set(key[1], value[1]) + require.True(t, ok) + result = bindCache.get(key[1]) + require.NotNil(t, result) + + ok = bindCache.set(key[2], value[2]) + require.True(t, ok) + result = bindCache.get(key[2]) + require.NotNil(t, result) + + // Both key[0] and key[1] are not in the cache + result = bindCache.get(key[0]) + require.Nil(t, result) + + result = bindCache.get(key[1]) + require.Nil(t, result) +} diff --git a/bindinfo/cache.go b/bindinfo/bind_record.go similarity index 83% rename from bindinfo/cache.go rename to bindinfo/bind_record.go index 46364affbd74d..5b9c9f2040b43 100644 --- a/bindinfo/cache.go +++ b/bindinfo/bind_record.go @@ -8,6 +8,7 @@ // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
@@ -17,15 +18,24 @@ import ( "time" "unsafe" - "github.com/pingcap/parser" "github.com/pingcap/tidb/metrics" + "github.com/pingcap/tidb/parser" "github.com/pingcap/tidb/sessionctx" "github.com/pingcap/tidb/types" + "github.com/pingcap/tidb/util/hack" "github.com/pingcap/tidb/util/hint" ) const ( + // Enabled is the bind info's in enabled status. + // It is the same as the previous 'Using' status. + // Only use 'Enabled' status in the future, not the 'Using' status. + // The 'Using' status is preserved for compatibility. + Enabled = "enabled" + // Disabled is the bind info's in disabled status. + Disabled = "disabled" // Using is the bind info's in use status. + // The 'Using' status is preserved for compatibility. Using = "using" // deleted is the bind info's deleted status. deleted = "deleted" @@ -51,7 +61,7 @@ type Binding struct { BindSQL string // Status represents the status of the binding. It can only be one of the following values: // 1. deleted: BindRecord is deleted, can not be used anymore. - // 2. using: Binding is in the normal active mode. + // 2. enabled, using: Binding is in the normal active mode. Status string CreateTime types.Time UpdateTime types.Time @@ -72,6 +82,11 @@ func (b *Binding) isSame(rb *Binding) bool { return b.BindSQL == rb.BindSQL } +// IsBindingEnabled returns whether the binding is enabled. +func (b *Binding) IsBindingEnabled() bool { + return b.Status == Enabled || b.Status == Using +} + // SinceUpdateTime returns the duration since last update time. Export for test. func (b *Binding) SinceUpdateTime() (time.Duration, error) { updateTime, err := b.UpdateTime.GoTime(time.Local) @@ -81,9 +96,6 @@ func (b *Binding) SinceUpdateTime() (time.Duration, error) { return time.Since(updateTime), nil } -// cache is a k-v map, key is original sql, value is a slice of BindRecord. -type cache map[string][]*BindRecord - // BindRecord represents a sql bind record retrieved from the storage. 
type BindRecord struct { OriginalSQL string @@ -95,16 +107,28 @@ type BindRecord struct { // HasUsingBinding checks if there are any using bindings in bind record. func (br *BindRecord) HasUsingBinding() bool { for _, binding := range br.Bindings { - if binding.Status == Using { + if binding.IsBindingEnabled() { return true } } return false } +// FindUsingBinding gets the using binding. +// There is at most one binding that can be used now +func (br *BindRecord) FindUsingBinding() *Binding { + for _, binding := range br.Bindings { + if binding.IsBindingEnabled() { + return &binding + } + } + return nil +} + // FindBinding find bindings in BindRecord. func (br *BindRecord) FindBinding(hint string) *Binding { - for _, binding := range br.Bindings { + for i := range br.Bindings { + binding := br.Bindings[i] if binding.ID == hint { return &binding } @@ -159,7 +183,8 @@ func merge(lBindRecord, rBindRecord *BindRecord) *BindRecord { return lBindRecord } result := lBindRecord.shallowCopy() - for _, rbind := range rBindRecord.Bindings { + for i := range rBindRecord.Bindings { + rbind := rBindRecord.Bindings[i] found := false for j, lbind := range lBindRecord.Bindings { if lbind.isSame(&rbind) { @@ -183,7 +208,8 @@ func (br *BindRecord) remove(deleted *BindRecord) *BindRecord { return &BindRecord{OriginalSQL: br.OriginalSQL, Db: br.Db} } result := br.shallowCopy() - for _, deletedBind := range deleted.Bindings { + for j := range deleted.Bindings { + deletedBind := deleted.Bindings[j] for i, bind := range result.Bindings { if bind.isSame(&deletedBind) { result.Bindings = append(result.Bindings[:i], result.Bindings[i+1:]...) @@ -219,8 +245,17 @@ func (br *BindRecord) isSame(other *BindRecord) bool { return br.OriginalSQL == other.OriginalSQL } +// size calculates the memory size of a BindRecord. 
+func (br *BindRecord) size() float64 { + mem := float64(len(hack.Slice(br.OriginalSQL)) + len(hack.Slice(br.Db))) + for _, binding := range br.Bindings { + mem += binding.size() + } + return mem +} + var statusIndex = map[string]int{ - Using: 0, + Enabled: 0, deleted: 1, Invalid: 2, } @@ -249,7 +284,7 @@ func (br *BindRecord) metrics() ([]float64, []int) { // size calculates the memory size of a bind info. func (b *Binding) size() float64 { - res := len(b.BindSQL) + len(b.Status) + 2*int(unsafe.Sizeof(b.CreateTime)) + len(b.Charset) + len(b.Collation) + res := len(b.BindSQL) + len(b.Status) + 2*int(unsafe.Sizeof(b.CreateTime)) + len(b.Charset) + len(b.Collation) + len(b.ID) return float64(res) } diff --git a/bindinfo/bind_test.go b/bindinfo/bind_test.go index 0ec409e37a06e..0704c114d2839 100644 --- a/bindinfo/bind_test.go +++ b/bindinfo/bind_test.go @@ -8,6 +8,7 @@ // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. @@ -16,67 +17,35 @@ package bindinfo_test import ( "context" "crypto/tls" - "flag" "fmt" - "os" "strconv" "testing" - "time" - . 
"github.com/pingcap/check" - "github.com/pingcap/parser" - "github.com/pingcap/parser/auth" - "github.com/pingcap/parser/model" - "github.com/pingcap/parser/terror" "github.com/pingcap/tidb/bindinfo" + "github.com/pingcap/tidb/config" "github.com/pingcap/tidb/domain" - "github.com/pingcap/tidb/errno" - "github.com/pingcap/tidb/kv" - "github.com/pingcap/tidb/meta/autoid" - "github.com/pingcap/tidb/metrics" + "github.com/pingcap/tidb/parser" + "github.com/pingcap/tidb/parser/auth" + "github.com/pingcap/tidb/parser/model" + "github.com/pingcap/tidb/parser/terror" plannercore "github.com/pingcap/tidb/planner/core" - "github.com/pingcap/tidb/session" "github.com/pingcap/tidb/session/txninfo" - "github.com/pingcap/tidb/store/mockstore" + "github.com/pingcap/tidb/testkit" "github.com/pingcap/tidb/util" - "github.com/pingcap/tidb/util/logutil" - utilparser "github.com/pingcap/tidb/util/parser" - "github.com/pingcap/tidb/util/stmtsummary" - "github.com/pingcap/tidb/util/testkit" - "github.com/pingcap/tidb/util/testleak" - dto "github.com/prometheus/client_model/go" - "github.com/tikv/client-go/v2/testutils" + "github.com/stretchr/testify/require" ) -func TestT(t *testing.T) { - CustomVerboseFlag = true - logLevel := os.Getenv("log_level") - err := logutil.InitLogger(logutil.NewLogConfig(logLevel, logutil.DefaultLogFormat, "", logutil.EmptyFileLogConfig, false)) - if err != nil { - t.Fatal(err) - } - autoid.SetStep(5000) - TestingT(t) -} - -var _ = Suite(&testSuite{}) - -type testSuite struct { - cluster testutils.Cluster - store kv.Storage - domain *domain.Domain - *parser.Parser -} - -type mockSessionManager struct { +// mockSessionManager is a mocked session manager which is used for test. 
+type mockSessionManager1 struct { PS []*util.ProcessInfo } -func (msm *mockSessionManager) ShowTxnList() []*txninfo.TxnInfo { - panic("unimplemented!") +func (msm *mockSessionManager1) ShowTxnList() []*txninfo.TxnInfo { + return nil } -func (msm *mockSessionManager) ShowProcessList() map[uint64]*util.ProcessInfo { +// ShowProcessList implements the SessionManager.ShowProcessList interface. +func (msm *mockSessionManager1) ShowProcessList() map[uint64]*util.ProcessInfo { ret := make(map[uint64]*util.ProcessInfo) for _, item := range msm.PS { ret[item.ID] = item @@ -84,7 +53,7 @@ func (msm *mockSessionManager) ShowProcessList() map[uint64]*util.ProcessInfo { return ret } -func (msm *mockSessionManager) GetProcessInfo(id uint64) (*util.ProcessInfo, bool) { +func (msm *mockSessionManager1) GetProcessInfo(id uint64) (*util.ProcessInfo, bool) { for _, item := range msm.PS { if item.ID == id { return item, true @@ -93,551 +62,324 @@ func (msm *mockSessionManager) GetProcessInfo(id uint64) (*util.ProcessInfo, boo return &util.ProcessInfo{}, false } -func (msm *mockSessionManager) Kill(cid uint64, query bool) { +// Kill implements the SessionManager.Kill interface. 
+func (msm *mockSessionManager1) Kill(cid uint64, query bool) { } -func (msm *mockSessionManager) KillAllConnections() { +func (msm *mockSessionManager1) KillAllConnections() { } -func (msm *mockSessionManager) UpdateTLSConfig(cfg *tls.Config) { +func (msm *mockSessionManager1) UpdateTLSConfig(cfg *tls.Config) { } -func (msm *mockSessionManager) ServerID() uint64 { +func (msm *mockSessionManager1) ServerID() uint64 { return 1 } -var mockTikv = flag.Bool("mockTikv", true, "use mock tikv store in bind test") - -func (s *testSuite) SetUpSuite(c *C) { - testleak.BeforeTest() - s.Parser = parser.New() - flag.Lookup("mockTikv") - useMockTikv := *mockTikv - if useMockTikv { - store, err := mockstore.NewMockStore( - mockstore.WithClusterInspector(func(c testutils.Cluster) { - mockstore.BootstrapWithSingleStore(c) - s.cluster = c - }), - ) - c.Assert(err, IsNil) - s.store = store - session.SetSchemaLease(0) - session.DisableStats4Test() - } - bindinfo.Lease = 0 - d, err := session.BootstrapSession(s.store) - c.Assert(err, IsNil) - d.SetStatsUpdating(true) - s.domain = d -} - -func (s *testSuite) TearDownSuite(c *C) { - s.domain.Close() - s.store.Close() - testleak.AfterTest(c)() -} +func TestPrepareCacheWithBinding(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + orgEnable := plannercore.PreparedPlanCacheEnabled() + defer func() { + plannercore.SetPreparedPlanCache(orgEnable) + }() + plannercore.SetPreparedPlanCache(true) -func (s *testSuite) TearDownTest(c *C) { - tk := testkit.NewTestKit(c, s.store) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") - r := tk.MustQuery("show tables") - for _, tb := range r.Rows() { - tableName := tb[0] - tk.MustExec(fmt.Sprintf("drop table %v", tableName)) - } -} - -func (s *testSuite) cleanBindingEnv(tk *testkit.TestKit) { - tk.MustExec("delete from mysql.bind_info where source != 'builtin'") - s.domain.BindHandle().Clear() -} + tk.MustExec("drop table if exists t1, t2") + tk.MustExec("create table 
t1(a int, b int, c int, key idx_b(b), key idx_c(c))") + tk.MustExec("create table t2(a int, b int, c int, key idx_b(b), key idx_c(c))") -func normalizeWithDefaultDB(c *C, sql, db string) (string, string) { - testParser := parser.New() - stmt, err := testParser.ParseOneStmt(sql, "", "") - c.Assert(err, IsNil) - normalized, digest := parser.NormalizeDigest(utilparser.RestoreWithDefaultDB(stmt, "test", "")) - return normalized, digest.String() -} + // TestDMLSQLBind + tk.MustExec("prepare stmt1 from 'delete from t1 where b = 1 and c > 1';") + tk.MustExec("execute stmt1;") + require.Equal(t, "t1:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess := tk.Session().ShowProcess() + ps := []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res := tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_b(b)"), res.Rows()) + tk.MustExec("execute stmt1;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) -func (s *testSuite) TestBindParse(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("create table t(i int)") - tk.MustExec("create index index_t on t(i)") - - originSQL := "select * from `test` . `t`" - bindSQL := "select * from `test` . 
`t` use index(index_t)" - defaultDb := "test" - status := "using" - charset := "utf8mb4" - collation := "utf8mb4_bin" - source := bindinfo.Manual - sql := fmt.Sprintf(`INSERT INTO mysql.bind_info(original_sql,bind_sql,default_db,status,create_time,update_time,charset,collation,source) VALUES ('%s', '%s', '%s', '%s', NOW(), NOW(),'%s', '%s', '%s')`, - originSQL, bindSQL, defaultDb, status, charset, collation, source) - tk.MustExec(sql) - bindHandle := bindinfo.NewBindHandle(tk.Se) - err := bindHandle.Update(true) - c.Check(err, IsNil) - c.Check(bindHandle.Size(), Equals, 1) - - sql, hash := parser.NormalizeDigest("select * from test . t") - bindData := bindHandle.GetBindRecord(hash.String(), sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t`") - bind := bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, "select * from `test` . `t` use index(index_t)") - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, "using") - c.Check(bind.Charset, Equals, "utf8mb4") - c.Check(bind.Collation, Equals, "utf8mb4_bin") - c.Check(bind.CreateTime, NotNil) - c.Check(bind.UpdateTime, NotNil) - dur, err := bind.SinceUpdateTime() - c.Assert(err, IsNil) - c.Assert(int64(dur), GreaterEqual, int64(0)) - - // Test fields with quotes or slashes. - sql = `CREATE GLOBAL BINDING FOR select * from t where i BETWEEN "a" and "b" USING select * from t use index(index_t) where i BETWEEN "a\nb\rc\td\0e" and 'x'` - tk.MustExec(sql) - tk.MustExec(`DROP global binding for select * from t use index(idx) where i BETWEEN "a\nb\rc\td\0e" and "x"`) - - // Test SetOprStmt. 
- tk.MustExec(`create binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()`) - tk.MustExec(`drop binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()`) - tk.MustExec(`create binding for select * from t INTERSECT select * from t using select * from t use index(index_t) INTERSECT select * from t use index()`) - tk.MustExec(`drop binding for select * from t INTERSECT select * from t using select * from t use index(index_t) INTERSECT select * from t use index()`) - tk.MustExec(`create binding for select * from t EXCEPT select * from t using select * from t use index(index_t) EXCEPT select * from t use index()`) - tk.MustExec(`drop binding for select * from t EXCEPT select * from t using select * from t use index(index_t) EXCEPT select * from t use index()`) - tk.MustExec(`create binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * from t use index())`) - tk.MustExec(`drop binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * from t use index())`) - - // Test Update / Delete. 
- tk.MustExec("create table t1(a int, b int, c int, key(b), key(c))") - tk.MustExec("create table t2(a int, b int, c int, key(b), key(c))") - tk.MustExec("create binding for delete from t1 where b = 1 and c > 1 using delete /*+ use_index(t1, c) */ from t1 where b = 1 and c > 1") - tk.MustExec("drop binding for delete from t1 where b = 1 and c > 1 using delete /*+ use_index(t1, c) */ from t1 where b = 1 and c > 1") - tk.MustExec("create binding for delete t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1 using delete /*+ hash_join(t1, t2), use_index(t1, c) */ t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1") - tk.MustExec("drop binding for delete t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1 using delete /*+ hash_join(t1, t2), use_index(t1, c) */ t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1") - tk.MustExec("create binding for update t1 set a = 1 where b = 1 and c > 1 using update /*+ use_index(t1, c) */ t1 set a = 1 where b = 1 and c > 1") - tk.MustExec("drop binding for update t1 set a = 1 where b = 1 and c > 1 using update /*+ use_index(t1, c) */ t1 set a = 1 where b = 1 and c > 1") - tk.MustExec("create binding for update t1, t2 set t1.a = 1 where t1.b = t2.b using update /*+ inl_join(t1) */ t1, t2 set t1.a = 1 where t1.b = t2.b") - tk.MustExec("drop binding for update t1, t2 set t1.a = 1 where t1.b = t2.b using update /*+ inl_join(t1) */ t1, t2 set t1.a = 1 where t1.b = t2.b") - // Test Insert / Replace. 
- tk.MustExec("create binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") - tk.MustExec("drop binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") - tk.MustExec("create binding for replace into t1 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") - tk.MustExec("drop binding for replace into t1 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") - err = tk.ExecToErr("create binding for insert into t1 values(1,1,1) using insert into t1 values(1,1,1)") - c.Assert(err.Error(), Equals, "create binding only supports INSERT / REPLACE INTO SELECT") - err = tk.ExecToErr("create binding for replace into t1 values(1,1,1) using replace into t1 values(1,1,1)") - c.Assert(err.Error(), Equals, "create binding only supports INSERT / REPLACE INTO SELECT") - - // Test errors. 
- tk.MustExec(`drop table if exists t1`) - tk.MustExec("create table t1(i int, s varchar(20))") - _, err = tk.Exec("create global binding for select * from t using select * from t1 use index for join(index_t)") - c.Assert(err, NotNil, Commentf("err %v", err)) -} + tk.MustExec("create global binding for delete from t1 where b = 1 and c > 1 using delete /*+ use_index(t1,idx_c) */ from t1 where b = 1 and c > 1") -var testSQLs = []struct { - createSQL string - overlaySQL string - querySQL string - originSQL string - bindSQL string - dropSQL string - memoryUsage float64 -}{ - { - createSQL: "binding for select * from t where i>100 using select * from t use index(index_t) where i>100", - overlaySQL: "binding for select * from t where i>99 using select * from t use index(index_t) where i>99", - querySQL: "select * from t where i > 30.0", - originSQL: "select * from `test` . `t` where `i` > ?", - bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) WHERE `i` > 99", - dropSQL: "binding for select * from t where i>100", - memoryUsage: float64(126), - }, - { - createSQL: "binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()", - overlaySQL: "", - querySQL: "select * from t union all select * from t", - originSQL: "select * from `test` . `t` union all select * from `test` . `t`", - bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) UNION ALL SELECT * FROM `test`.`t` USE INDEX ()", - dropSQL: "binding for select * from t union all select * from t", - memoryUsage: float64(182), - }, - { - createSQL: "binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * from t use index())", - overlaySQL: "", - querySQL: "(select * from t) union all (select * from t)", - originSQL: "( select * from `test` . `t` ) union all ( select * from `test` . 
`t` )", - bindSQL: "(SELECT * FROM `test`.`t` USE INDEX (`index_t`)) UNION ALL (SELECT * FROM `test`.`t` USE INDEX ())", - dropSQL: "binding for (select * from t) union all (select * from t)", - memoryUsage: float64(194), - }, - { - createSQL: "binding for select * from t intersect select * from t using select * from t use index(index_t) intersect select * from t use index()", - overlaySQL: "", - querySQL: "select * from t intersect select * from t", - originSQL: "select * from `test` . `t` intersect select * from `test` . `t`", - bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) INTERSECT SELECT * FROM `test`.`t` USE INDEX ()", - dropSQL: "binding for select * from t intersect select * from t", - memoryUsage: float64(182), - }, - { - createSQL: "binding for select * from t except select * from t using select * from t use index(index_t) except select * from t use index()", - overlaySQL: "", - querySQL: "select * from t except select * from t", - originSQL: "select * from `test` . `t` except select * from `test` . `t`", - bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) EXCEPT SELECT * FROM `test`.`t` USE INDEX ()", - dropSQL: "binding for select * from t except select * from t", - memoryUsage: float64(176), - }, - { - createSQL: "binding for select * from t using select /*+ use_index(t,index_t)*/ * from t", - overlaySQL: "", - querySQL: "select * from t ", - originSQL: "select * from `test` . `t`", - bindSQL: "SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t`", - dropSQL: "binding for select * from t", - memoryUsage: float64(106), - }, - { - createSQL: "binding for delete from t where i = 1 using delete /*+ use_index(t,index_t) */ from t where i = 1", - overlaySQL: "", - querySQL: "delete from t where i = 2", - originSQL: "delete from `test` . 
`t` where `i` = ?", - bindSQL: "DELETE /*+ use_index(`t` `index_t`)*/ FROM `test`.`t` WHERE `i` = 1", - dropSQL: "binding for delete from t where i = 1", - memoryUsage: float64(130), - }, - { - createSQL: "binding for delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 1 using delete /*+ use_index(t,index_t), hash_join(t,t1) */ t, t1 from t inner join t1 on t.s = t1.s where t.i = 1", - overlaySQL: "", - querySQL: "delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 2", - originSQL: "delete `test` . `t` , `test` . `t1` from `test` . `t` join `test` . `t1` on `t` . `s` = `t1` . `s` where `t` . `i` = ?", - bindSQL: "DELETE /*+ use_index(`t` `index_t`) hash_join(`t`, `t1`)*/ `test`.`t`,`test`.`t1` FROM `test`.`t` JOIN `test`.`t1` ON `t`.`s` = `t1`.`s` WHERE `t`.`i` = 1", - dropSQL: "binding for delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 1", - memoryUsage: float64(297), - }, - { - createSQL: "binding for update t set s = 'a' where i = 1 using update /*+ use_index(t,index_t) */ t set s = 'a' where i = 1", - overlaySQL: "", - querySQL: "update t set s='b' where i=2", - originSQL: "update `test` . `t` set `s` = ? where `i` = ?", - bindSQL: "UPDATE /*+ use_index(`t` `index_t`)*/ `test`.`t` SET `s`='a' WHERE `i` = 1", - dropSQL: "binding for update t set s = 'a' where i = 1", - memoryUsage: float64(144), - }, - { - createSQL: "binding for update t, t1 set t.s = 'a' where t.i = t1.i using update /*+ inl_join(t1) */ t, t1 set t.s = 'a' where t.i = t1.i", - overlaySQL: "", - querySQL: "update t , t1 set t.s='b' where t.i=t1.i", - originSQL: "update ( `test` . `t` ) join `test` . `t1` set `t` . `s` = ? where `t` . `i` = `t1` . 
`i`", - bindSQL: "UPDATE /*+ inl_join(`t1`)*/ (`test`.`t`) JOIN `test`.`t1` SET `t`.`s`='a' WHERE `t`.`i` = `t1`.`i`", - dropSQL: "binding for update t, t1 set t.s = 'a' where t.i = t1.i", - memoryUsage: float64(212), - }, - { - createSQL: "binding for insert into t1 select * from t where t.i = 1 using insert into t1 select /*+ use_index(t,index_t) */ * from t where t.i = 1", - overlaySQL: "", - querySQL: "insert into t1 select * from t where t.i = 2", - originSQL: "insert into `test` . `t1` select * from `test` . `t` where `t` . `i` = ?", - bindSQL: "INSERT INTO `test`.`t1` SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t` WHERE `t`.`i` = 1", - dropSQL: "binding for insert into t1 select * from t where t.i = 1", - memoryUsage: float64(194), - }, - { - createSQL: "binding for replace into t1 select * from t where t.i = 1 using replace into t1 select /*+ use_index(t,index_t) */ * from t where t.i = 1", - overlaySQL: "", - querySQL: "replace into t1 select * from t where t.i = 2", - originSQL: "replace into `test` . `t1` select * from `test` . `t` where `t` . 
`i` = ?", - bindSQL: "REPLACE INTO `test`.`t1` SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t` WHERE `t`.`i` = 1", - dropSQL: "binding for replace into t1 select * from t where t.i = 1", - memoryUsage: float64(196), - }, -} + tk.MustExec("execute stmt1;") + require.Equal(t, "t1:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_c(c)"), res.Rows()) + + tk.MustExec("prepare stmt2 from 'delete t1, t2 from t1 inner join t2 on t1.b = t2.b';") + tk.MustExec("execute stmt2;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "HashJoin"), res.Rows()) + tk.MustExec("execute stmt2;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) -func (s *testSuite) TestGlobalBinding(c *C) { - tk := testkit.NewTestKit(c, s.store) + tk.MustExec("create global binding for delete t1, t2 from t1 inner join t2 on t1.b = t2.b using delete /*+ inl_join(t1) */ t1, t2 from t1 inner join t2 on t1.b = t2.b") - for _, testSQL := range testSQLs { - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("drop table if exists t1") - tk.MustExec("create table t(i int, s varchar(20))") - tk.MustExec("create table t1(i int, s varchar(20))") - tk.MustExec("create index index_t on t(i,s)") + tk.MustExec("execute stmt2;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + 
strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "IndexJoin"), res.Rows()) + + tk.MustExec("prepare stmt3 from 'update t1 set a = 1 where b = 1 and c > 1';") + tk.MustExec("execute stmt3;") + require.Equal(t, "t1:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_b(b)"), res.Rows()) + tk.MustExec("execute stmt3;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) - metrics.BindTotalGauge.Reset() - metrics.BindMemoryUsage.Reset() + tk.MustExec("create global binding for update t1 set a = 1 where b = 1 and c > 1 using update /*+ use_index(t1,idx_c) */ t1 set a = 1 where b = 1 and c > 1") - _, err := tk.Exec("create global " + testSQL.createSQL) - c.Assert(err, IsNil, Commentf("err %v", err)) + tk.MustExec("execute stmt3;") + require.Equal(t, "t1:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_c(c)"), res.Rows()) + + tk.MustExec("prepare stmt4 from 'update t1, t2 set t1.a = 1 where t1.b = t2.b';") + tk.MustExec("execute stmt4;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "HashJoin"), res.Rows()) + tk.MustExec("execute stmt4;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) - if 
testSQL.overlaySQL != "" { - _, err = tk.Exec("create global " + testSQL.overlaySQL) - c.Assert(err, IsNil) - } + tk.MustExec("create global binding for update t1, t2 set t1.a = 1 where t1.b = t2.b using update /*+ inl_join(t1) */ t1, t2 set t1.a = 1 where t1.b = t2.b") - pb := &dto.Metric{} - err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeGlobal, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, float64(1)) - err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeGlobal, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, testSQL.memoryUsage) - - sql, hash := normalizeWithDefaultDB(c, testSQL.querySQL, "test") - - bindData := s.domain.BindHandle().GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, testSQL.originSQL) - bind := bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, testSQL.bindSQL) - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, "using") - c.Check(bind.Charset, NotNil) - c.Check(bind.Collation, NotNil) - c.Check(bind.CreateTime, NotNil) - c.Check(bind.UpdateTime, NotNil) - - rs, err := tk.Exec("show global bindings") - c.Assert(err, IsNil) - chk := rs.NewChunk() - err = rs.Next(context.TODO(), chk) - c.Check(err, IsNil) - c.Check(chk.NumRows(), Equals, 1) - row := chk.GetRow(0) - c.Check(row.GetString(0), Equals, testSQL.originSQL) - c.Check(row.GetString(1), Equals, testSQL.bindSQL) - c.Check(row.GetString(2), Equals, "test") - c.Check(row.GetString(3), Equals, "using") - c.Check(row.GetTime(4), NotNil) - c.Check(row.GetTime(5), NotNil) - c.Check(row.GetString(6), NotNil) - c.Check(row.GetString(7), NotNil) - - bindHandle := bindinfo.NewBindHandle(tk.Se) - err = bindHandle.Update(true) - c.Check(err, IsNil) - c.Check(bindHandle.Size(), Equals, 1) - - bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, 
testSQL.originSQL) - bind = bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, testSQL.bindSQL) - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, "using") - c.Check(bind.Charset, NotNil) - c.Check(bind.Collation, NotNil) - c.Check(bind.CreateTime, NotNil) - c.Check(bind.UpdateTime, NotNil) - - _, err = tk.Exec("drop global " + testSQL.dropSQL) - c.Check(err, IsNil) - bindData = s.domain.BindHandle().GetBindRecord(hash, sql, "test") - c.Check(bindData, IsNil) - - err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeGlobal, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, float64(0)) - err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeGlobal, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - // From newly created global bind handle. - c.Assert(pb.GetGauge().GetValue(), Equals, testSQL.memoryUsage) - - bindHandle = bindinfo.NewBindHandle(tk.Se) - err = bindHandle.Update(true) - c.Check(err, IsNil) - c.Check(bindHandle.Size(), Equals, 0) - - bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, IsNil) - - rs, err = tk.Exec("show global bindings") - c.Assert(err, IsNil) - chk = rs.NewChunk() - err = rs.Next(context.TODO(), chk) - c.Check(err, IsNil) - c.Check(chk.NumRows(), Equals, 0) - - _, err = tk.Exec("delete from mysql.bind_info") - c.Assert(err, IsNil) - } -} + tk.MustExec("execute stmt4;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "IndexJoin"), res.Rows()) + + tk.MustExec("prepare stmt5 from 'insert into t1 select * from t2 where t2.b = 2 and t2.c > 2';") + tk.MustExec("execute stmt5;") + require.Equal(t, "t2:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = 
[]*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_b(b)"), res.Rows()) + tk.MustExec("execute stmt5;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) -func (s *testSuite) TestSessionBinding(c *C) { - tk := testkit.NewTestKit(c, s.store) + tk.MustExec("create global binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert /*+ use_index(t2,idx_c) */ into t1 select * from t2 where t2.b = 1 and t2.c > 1") - for _, testSQL := range testSQLs { - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("drop table if exists t1") - tk.MustExec("create table t(i int, s varchar(20))") - tk.MustExec("create table t1(i int, s varchar(20))") - tk.MustExec("create index index_t on t(i,s)") + tk.MustExec("execute stmt5;") + require.Equal(t, "t2:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_b(b)"), res.Rows()) - metrics.BindTotalGauge.Reset() - metrics.BindMemoryUsage.Reset() + tk.MustExec("drop global binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1") + tk.MustExec("create global binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t1 select /*+ use_index(t2,idx_c) */ * from t2 where t2.b = 1 and t2.c > 1") - _, err := tk.Exec("create session " + testSQL.createSQL) - c.Assert(err, IsNil, Commentf("err %v", err)) + tk.MustExec("execute stmt5;") + require.Equal(t, "t2:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = 
tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_c(c)"), res.Rows()) + + tk.MustExec("prepare stmt6 from 'replace into t1 select * from t2 where t2.b = 2 and t2.c > 2';") + tk.MustExec("execute stmt6;") + require.Equal(t, "t2:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_b(b)"), res.Rows()) + tk.MustExec("execute stmt6;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) - if testSQL.overlaySQL != "" { - _, err = tk.Exec("create session " + testSQL.overlaySQL) - c.Assert(err, IsNil) - } + tk.MustExec("create global binding for replace into t1 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t1 select /*+ use_index(t2,idx_c) */ * from t2 where t2.b = 1 and t2.c > 1") - pb := &dto.Metric{} - err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeSession, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, float64(1)) - err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeSession, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, testSQL.memoryUsage) - - handle := tk.Se.Value(bindinfo.SessionBindInfoKeyType).(*bindinfo.SessionHandle) - bindData := handle.GetBindRecord(testSQL.originSQL, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, testSQL.originSQL) - bind := bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, testSQL.bindSQL) - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, 
"using") - c.Check(bind.Charset, NotNil) - c.Check(bind.Collation, NotNil) - c.Check(bind.CreateTime, NotNil) - c.Check(bind.UpdateTime, NotNil) - - rs, err := tk.Exec("show global bindings") - c.Assert(err, IsNil) - chk := rs.NewChunk() - err = rs.Next(context.TODO(), chk) - c.Check(err, IsNil) - c.Check(chk.NumRows(), Equals, 0) - - rs, err = tk.Exec("show session bindings") - c.Assert(err, IsNil) - chk = rs.NewChunk() - err = rs.Next(context.TODO(), chk) - c.Check(err, IsNil) - c.Check(chk.NumRows(), Equals, 1) - row := chk.GetRow(0) - c.Check(row.GetString(0), Equals, testSQL.originSQL) - c.Check(row.GetString(1), Equals, testSQL.bindSQL) - c.Check(row.GetString(2), Equals, "test") - c.Check(row.GetString(3), Equals, "using") - c.Check(row.GetTime(4), NotNil) - c.Check(row.GetTime(5), NotNil) - c.Check(row.GetString(6), NotNil) - c.Check(row.GetString(7), NotNil) - - _, err = tk.Exec("drop session " + testSQL.dropSQL) - c.Assert(err, IsNil) - bindData = handle.GetBindRecord(testSQL.originSQL, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, testSQL.originSQL) - c.Check(len(bindData.Bindings), Equals, 0) - - err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeSession, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, float64(0)) - err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeSession, bindinfo.Using).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetGauge().GetValue(), Equals, float64(0)) - } -} + tk.MustExec("execute stmt6;") + require.Equal(t, "t2:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "idx_c(c)"), res.Rows()) -func (s *testSuite) TestGlobalAndSessionBindingBothExist(c *C) { - 
tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") + // TestExplain tk.MustExec("drop table if exists t1") tk.MustExec("drop table if exists t2") tk.MustExec("create table t1(id int)") tk.MustExec("create table t2(id int)") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - c.Assert(tk.HasPlan("SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) + + tk.MustExec("prepare stmt1 from 'SELECT * from t1,t2 where t1.id = t2.id';") + tk.MustExec("execute stmt1;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "HashJoin")) + tk.MustExec("execute stmt1;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) + + tk.MustExec("prepare stmt2 from 'SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id';") + tk.MustExec("execute stmt2;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "MergeJoin")) + tk.MustExec("execute stmt2;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") - // Test bindingUsage, which indicates how many times the binding is used. 
- metrics.BindUsageCounter.Reset() - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) - pb := &dto.Metric{} - err := metrics.BindUsageCounter.WithLabelValues(metrics.ScopeGlobal).Write(pb) - c.Assert(err, IsNil) - c.Assert(pb.GetCounter().GetValue(), Equals, float64(1)) - - // Test 'tidb_use_plan_baselines' - tk.MustExec("set @@tidb_use_plan_baselines = 0") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - tk.MustExec("set @@tidb_use_plan_baselines = 1") + tk.MustExec("execute stmt1;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "MergeJoin")) - // Test 'drop global binding' - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) - tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - - // Test the case when global and session binding both exist - // PART1 : session binding should totally cover global binding - // use merge join as session binding here since the optimizer will choose hash join for this stmt in default - tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_HJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - tk.MustExec("create binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", 
"MergeJoin"), IsTrue) - // PART2 : the dropped session binding should continue to block the effect of global binding - tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") - tk.MustExec("drop binding for SELECT * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) + tk.MustExec("create index index_id on t1(id)") + tk.MustExec("prepare stmt1 from 'SELECT * from t1 use index(index_id)';") + tk.MustExec("execute stmt1;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "IndexReader")) + tk.MustExec("execute stmt1;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) + + tk.MustExec("create global binding for SELECT * from t1 using SELECT * from t1 ignore index(index_id)") + tk.MustExec("execute stmt1;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.False(t, tk.HasPlan4ExplainFor(res, "IndexReader")) + tk.MustExec("execute stmt1;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) + + // Add test for SetOprStmt + tk.MustExec("prepare stmt1 from 'SELECT * from t1 union SELECT * from t1';") + tk.MustExec("execute stmt1;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain 
for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.False(t, tk.HasPlan4ExplainFor(res, "IndexReader")) + tk.MustExec("execute stmt1;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) + + tk.MustExec("prepare stmt2 from 'SELECT * from t1 use index(index_id) union SELECT * from t1';") + tk.MustExec("execute stmt2;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "IndexReader")) + tk.MustExec("execute stmt2;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) + + tk.MustExec("create global binding for SELECT * from t1 union SELECT * from t1 using SELECT * from t1 use index(index_id) union SELECT * from t1") + + tk.MustExec("execute stmt1;") + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.HasPlan4ExplainFor(res, "IndexReader")) + + tk.MustExec("drop global binding for SELECT * from t1 union SELECT * from t1") + + // TestBindingSymbolList + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, INDEX ia (a), INDEX ib (b));") + tk.MustExec("insert into t value(1, 1);") + tk.MustExec("prepare stmt1 from 'select a, b from t where a = 3 limit 1, 100';") + tk.MustExec("execute stmt1;") + require.Equal(t, "t:ia", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "ia(a)"), res.Rows()) + 
tk.MustExec("execute stmt1;") + tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1")) + + tk.MustExec(`create global binding for select a, b from t where a = 1 limit 0, 1 using select a, b from t use index (ib) where a = 1 limit 0, 1`) + + // after binding + tk.MustExec("execute stmt1;") + require.Equal(t, "t:ib", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tkProcess = tk.Session().ShowProcess() + ps = []*util.ProcessInfo{tkProcess} + tk.Session().SetSessionManager(&mockSessionManager1{PS: ps}) + res = tk.MustQuery("explain for connection " + strconv.FormatUint(tkProcess.ID, 10)) + require.True(t, tk.MustUseIndex4ExplainFor(res, "ib(b)"), res.Rows()) } -func (s *testSuite) TestExplain(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestExplain(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t1") tk.MustExec("drop table if exists t2") tk.MustExec("create table t1(id int)") tk.MustExec("create table t2(id int)") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - c.Assert(tk.HasPlan("SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + require.True(t, tk.HasPlan("SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id", "MergeJoin")) tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin")) tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") // Add test for SetOprStmt tk.MustExec("create index index_id on t1(id)") - 
c.Assert(tk.HasPlan("SELECT * from t1 union SELECT * from t1", "IndexReader"), IsFalse) - c.Assert(tk.HasPlan("SELECT * from t1 use index(index_id) union SELECT * from t1", "IndexReader"), IsTrue) + require.False(t, tk.HasPlan("SELECT * from t1 union SELECT * from t1", "IndexReader")) + require.True(t, tk.HasPlan("SELECT * from t1 use index(index_id) union SELECT * from t1", "IndexReader")) tk.MustExec("create global binding for SELECT * from t1 union SELECT * from t1 using SELECT * from t1 use index(index_id) union SELECT * from t1") - c.Assert(tk.HasPlan("SELECT * from t1 union SELECT * from t1", "IndexReader"), IsTrue) + require.True(t, tk.HasPlan("SELECT * from t1 union SELECT * from t1", "IndexReader")) tk.MustExec("drop global binding for SELECT * from t1 union SELECT * from t1") } // TestBindingSymbolList tests sql with "?, ?, ?, ?", fixes #13871 -func (s *testSuite) TestBindingSymbolList(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestBindingSymbolList(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, INDEX ia (a), INDEX ib (b));") @@ -645,89 +387,93 @@ func (s *testSuite) TestBindingSymbolList(c *C) { // before binding tk.MustQuery("select a, b from t where a = 3 limit 1, 100") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:ia") - c.Assert(tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ia(a)"), IsTrue) + require.Equal(t, "t:ia", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ia(a)")) tk.MustExec(`create global binding for select a, b from t where a = 1 limit 0, 1 using select a, b from t use index (ib) where a = 1 limit 0, 1`) // after binding tk.MustQuery("select a, b from t where a = 3 limit 1, 100") - 
c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:ib") - c.Assert(tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ib(b)"), IsTrue) + require.Equal(t, "t:ib", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ib(b)")) // Normalize sql, hash := parser.NormalizeDigest("select a, b from test . t where a = 1 limit 0, 1") - bindData := s.domain.BindHandle().GetBindRecord(hash.String(), sql, "test") - c.Assert(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select `a` , `b` from `test` . `t` where `a` = ? limit ...") + bindData := dom.BindHandle().GetBindRecord(hash.String(), sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select `a` , `b` from `test` . `t` where `a` = ? limit ...", bindData.OriginalSQL) bind := bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, "SELECT `a`,`b` FROM `test`.`t` USE INDEX (`ib`) WHERE `a` = 1 LIMIT 0,1") - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, "using") - c.Check(bind.Charset, NotNil) - c.Check(bind.Collation, NotNil) - c.Check(bind.CreateTime, NotNil) - c.Check(bind.UpdateTime, NotNil) + require.Equal(t, "SELECT `a`,`b` FROM `test`.`t` USE INDEX (`ib`) WHERE `a` = 1 LIMIT 0,1", bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) + require.NotNil(t, bind.Charset) + require.NotNil(t, bind.Collation) + require.NotNil(t, bind.CreateTime) + require.NotNil(t, bind.UpdateTime) } -func (s *testSuite) TestDMLSQLBind(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestDMLSQLBind(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t1, t2") tk.MustExec("create table t1(a int, b int, c int, key idx_b(b), key idx_c(c))") tk.MustExec("create table t2(a int, b int, c int, key idx_b(b), 
key idx_c(c))") tk.MustExec("delete from t1 where b = 1 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t1:idx_b") - c.Assert(tk.MustUseIndex("delete from t1 where b = 1 and c > 1", "idx_b(b)"), IsTrue) + require.Equal(t, "t1:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("delete from t1 where b = 1 and c > 1", "idx_b(b)")) tk.MustExec("create global binding for delete from t1 where b = 1 and c > 1 using delete /*+ use_index(t1,idx_c) */ from t1 where b = 1 and c > 1") tk.MustExec("delete from t1 where b = 1 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t1:idx_c") - c.Assert(tk.MustUseIndex("delete from t1 where b = 1 and c > 1", "idx_c(c)"), IsTrue) + require.Equal(t, "t1:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("delete from t1 where b = 1 and c > 1", "idx_c(c)")) - c.Assert(tk.HasPlan("delete t1, t2 from t1 inner join t2 on t1.b = t2.b", "HashJoin"), IsTrue) + require.True(t, tk.HasPlan("delete t1, t2 from t1 inner join t2 on t1.b = t2.b", "HashJoin")) tk.MustExec("create global binding for delete t1, t2 from t1 inner join t2 on t1.b = t2.b using delete /*+ inl_join(t1) */ t1, t2 from t1 inner join t2 on t1.b = t2.b") - c.Assert(tk.HasPlan("delete t1, t2 from t1 inner join t2 on t1.b = t2.b", "IndexJoin"), IsTrue) + require.True(t, tk.HasPlan("delete t1, t2 from t1 inner join t2 on t1.b = t2.b", "IndexJoin")) tk.MustExec("update t1 set a = 1 where b = 1 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t1:idx_b") - c.Assert(tk.MustUseIndex("update t1 set a = 1 where b = 1 and c > 1", "idx_b(b)"), IsTrue) + require.Equal(t, "t1:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("update t1 set a = 1 where b = 1 and c > 1", "idx_b(b)")) tk.MustExec("create global binding for update t1 set a = 1 where b = 1 and c > 1 using update /*+ 
use_index(t1,idx_c) */ t1 set a = 1 where b = 1 and c > 1") tk.MustExec("delete from t1 where b = 1 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t1:idx_c") - c.Assert(tk.MustUseIndex("update t1 set a = 1 where b = 1 and c > 1", "idx_c(c)"), IsTrue) + require.Equal(t, "t1:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("update t1 set a = 1 where b = 1 and c > 1", "idx_c(c)")) - c.Assert(tk.HasPlan("update t1, t2 set t1.a = 1 where t1.b = t2.b", "HashJoin"), IsTrue) + require.True(t, tk.HasPlan("update t1, t2 set t1.a = 1 where t1.b = t2.b", "HashJoin")) tk.MustExec("create global binding for update t1, t2 set t1.a = 1 where t1.b = t2.b using update /*+ inl_join(t1) */ t1, t2 set t1.a = 1 where t1.b = t2.b") - c.Assert(tk.HasPlan("update t1, t2 set t1.a = 1 where t1.b = t2.b", "IndexJoin"), IsTrue) + require.True(t, tk.HasPlan("update t1, t2 set t1.a = 1 where t1.b = t2.b", "IndexJoin")) tk.MustExec("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t2:idx_b") - c.Assert(tk.MustUseIndex("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_b(b)"), IsTrue) + require.Equal(t, "t2:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_b(b)")) tk.MustExec("create global binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert /*+ use_index(t2,idx_c) */ into t1 select * from t2 where t2.b = 1 and t2.c > 1") tk.MustExec("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t2:idx_b") - c.Assert(tk.MustUseIndex("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_b(b)"), IsTrue) + require.Equal(t, "t2:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, 
tk.MustUseIndex("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_b(b)")) tk.MustExec("drop global binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1") tk.MustExec("create global binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t1 select /*+ use_index(t2,idx_c) */ * from t2 where t2.b = 1 and t2.c > 1") tk.MustExec("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t2:idx_c") - c.Assert(tk.MustUseIndex("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_c(c)"), IsTrue) + require.Equal(t, "t2:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("insert into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_c(c)")) tk.MustExec("replace into t1 select * from t2 where t2.b = 2 and t2.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t2:idx_b") - c.Assert(tk.MustUseIndex("replace into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_b(b)"), IsTrue) + require.Equal(t, "t2:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("replace into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_b(b)")) tk.MustExec("create global binding for replace into t1 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t1 select /*+ use_index(t2,idx_c) */ * from t2 where t2.b = 1 and t2.c > 1") tk.MustExec("replace into t1 select * from t2 where t2.b = 2 and t2.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t2:idx_c") - c.Assert(tk.MustUseIndex("replace into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_c(c)"), IsTrue) + require.Equal(t, "t2:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("replace into t1 select * from t2 where t2.b = 2 and t2.c > 2", "idx_c(c)")) } -func (s *testSuite) 
TestBestPlanInBaselines(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestBestPlanInBaselines(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, INDEX ia (a), INDEX ib (b));") @@ -735,37 +481,39 @@ func (s *testSuite) TestBestPlanInBaselines(c *C) { // before binding tk.MustQuery("select a, b from t where a = 3 limit 1, 100") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:ia") - c.Assert(tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ia(a)"), IsTrue) + require.Equal(t, "t:ia", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ia(a)")) tk.MustQuery("select a, b from t where b = 3 limit 1, 100") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:ib") - c.Assert(tk.MustUseIndex("select a, b from t where b = 3 limit 1, 100", "ib(b)"), IsTrue) + require.Equal(t, "t:ib", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select a, b from t where b = 3 limit 1, 100", "ib(b)")) tk.MustExec(`create global binding for select a, b from t where a = 1 limit 0, 1 using select /*+ use_index(@sel_1 test.t ia) */ a, b from t where a = 1 limit 0, 1`) tk.MustExec(`create global binding for select a, b from t where b = 1 limit 0, 1 using select /*+ use_index(@sel_1 test.t ib) */ a, b from t where b = 1 limit 0, 1`) - sql, hash := normalizeWithDefaultDB(c, "select a, b from t where a = 1 limit 0, 1", "test") - bindData := s.domain.BindHandle().GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select `a` , `b` from `test` . `t` where `a` = ? 
limit ...") + sql, hash := utilNormalizeWithDefaultDB(t, "select a, b from t where a = 1 limit 0, 1", "test") + bindData := dom.BindHandle().GetBindRecord(hash, sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select `a` , `b` from `test` . `t` where `a` = ? limit ...", bindData.OriginalSQL) bind := bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `ia`)*/ `a`,`b` FROM `test`.`t` WHERE `a` = 1 LIMIT 0,1") - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, "using") + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `ia`)*/ `a`,`b` FROM `test`.`t` WHERE `a` = 1 LIMIT 0,1", bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) tk.MustQuery("select a, b from t where a = 3 limit 1, 10") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:ia") - c.Assert(tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ia(a)"), IsTrue) + require.Equal(t, "t:ia", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select a, b from t where a = 3 limit 1, 100", "ia(a)")) tk.MustQuery("select a, b from t where b = 3 limit 1, 100") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:ib") - c.Assert(tk.MustUseIndex("select a, b from t where b = 3 limit 1, 100", "ib(b)"), IsTrue) + require.Equal(t, "t:ib", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select a, b from t where b = 3 limit 1, 100", "ib(b)")) } -func (s *testSuite) TestErrorBind(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestErrorBind(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustGetErrMsg("create global binding for select * from t using select * from t", "[schema:1146]Table 'test.t' doesn't exist") tk.MustExec("drop table if 
exists t") @@ -775,399 +523,46 @@ func (s *testSuite) TestErrorBind(c *C) { tk.MustExec("create index index_t on t(i,s)") _, err := tk.Exec("create global binding for select * from t where i>100 using select * from t use index(index_t) where i>100") - c.Assert(err, IsNil, Commentf("err %v", err)) + require.NoError(t, err, "err %v", err) sql, hash := parser.NormalizeDigest("select * from test . t where i > ?") - bindData := s.domain.BindHandle().GetBindRecord(hash.String(), sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `i` > ?") + bindData := dom.BindHandle().GetBindRecord(hash.String(), sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t` where `i` > ?", bindData.OriginalSQL) bind := bindData.Bindings[0] - c.Check(bind.BindSQL, Equals, "SELECT * FROM `test`.`t` USE INDEX (`index_t`) WHERE `i` > 100") - c.Check(bindData.Db, Equals, "test") - c.Check(bind.Status, Equals, "using") - c.Check(bind.Charset, NotNil) - c.Check(bind.Collation, NotNil) - c.Check(bind.CreateTime, NotNil) - c.Check(bind.UpdateTime, NotNil) + require.Equal(t, "SELECT * FROM `test`.`t` USE INDEX (`index_t`) WHERE `i` > 100", bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) + require.NotNil(t, bind.Charset) + require.NotNil(t, bind.Collation) + require.NotNil(t, bind.CreateTime) + require.NotNil(t, bind.UpdateTime) tk.MustExec("drop index index_t on t") _, err = tk.Exec("select * from t where i > 10") - c.Check(err, IsNil) + require.NoError(t, err) - s.domain.BindHandle().DropInvalidBindRecord() + dom.BindHandle().DropInvalidBindRecord() rs, err := tk.Exec("show global bindings") - c.Assert(err, IsNil) - chk := rs.NewChunk() + require.NoError(t, err) + chk := rs.NewChunk(nil) err = rs.Next(context.TODO(), chk) - c.Check(err, IsNil) - c.Check(chk.NumRows(), Equals, 0) -} - -func (s *testSuite) TestPreparedStmt(c *C) { - tk := 
testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - - orgEnable := plannercore.PreparedPlanCacheEnabled() - defer func() { - plannercore.SetPreparedPlanCache(orgEnable) - }() - plannercore.SetPreparedPlanCache(false) // requires plan cache disabled, or the IndexNames = 1 on first test. - - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustExec(`prepare stmt1 from 'select * from t'`) - tk.MustExec("execute stmt1") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 0) - - tk.MustExec("create binding for select * from t using select * from t use index(idx)") - tk.MustExec("execute stmt1") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx") - - tk.MustExec("drop binding for select * from t") - tk.MustExec("execute stmt1") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 0) - - tk.MustExec("drop table t") - tk.MustExec("create table t(a int, b int, c int, index idx_b(b), index idx_c(c))") - tk.MustExec("set @p = 1") - - tk.MustExec("prepare stmt from 'delete from t where b = ? and c > ?'") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_b") - tk.MustExec("create binding for delete from t where b = 2 and c > 2 using delete /*+ use_index(t,idx_c) */ from t where b = 2 and c > 2") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") - - tk.MustExec("prepare stmt from 'update t set a = 1 where b = ? 
and c > ?'") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_b") - tk.MustExec("create binding for update t set a = 2 where b = 2 and c > 2 using update /*+ use_index(t,idx_c) */ t set a = 2 where b = 2 and c > 2") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") - - tk.MustExec("drop table if exists t1") - tk.MustExec("create table t1 like t") - tk.MustExec("prepare stmt from 'insert into t1 select * from t where t.b = ? and t.c > ?'") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_b") - tk.MustExec("create binding for insert into t1 select * from t where t.b = 2 and t.c > 2 using insert into t1 select /*+ use_index(t,idx_c) */ * from t where t.b = 2 and t.c > 2") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") - - tk.MustExec("prepare stmt from 'replace into t1 select * from t where t.b = ? 
and t.c > ?'") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_b") - tk.MustExec("create binding for replace into t1 select * from t where t.b = 2 and t.c > 2 using replace into t1 select /*+ use_index(t,idx_c) */ * from t where t.b = 2 and t.c > 2") - tk.MustExec("execute stmt using @p,@p") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") + require.NoError(t, err) + require.Equal(t, 0, chk.NumRows()) } -func (s *testSuite) TestDMLCapturePlanBaseline(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec(" set @@tidb_capture_plan_baselines = on") +func TestDMLEvolveBaselines(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true defer func() { - tk.MustExec(" set @@tidb_capture_plan_baselines = off") + config.CheckTableBeforeDrop = originalVal }() - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, c int, key idx_b(b), key idx_c(c))") - tk.MustExec("create table t1 like t") - s.domain.BindHandle().CaptureBaselines() - tk.MustQuery("show global bindings").Check(testkit.Rows()) - tk.MustExec("delete from t where b = 1 and c > 1") - tk.MustExec("delete from t where b = 1 and c > 1") - tk.MustExec("update t set a = 1 where b = 1 and c > 1") - tk.MustExec("update t set a = 1 where b = 1 and c > 1") - tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("insert into t1 values(1,1,1)") - tk.MustExec("insert into t1 
values(1,1,1)") - tk.MustExec("replace into t1 values(1,1,1)") - tk.MustExec("replace into t1 values(1,1,1)") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 0) - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("delete from t where b = 1 and c > 1") - tk.MustExec("delete from t where b = 1 and c > 1") - tk.MustExec("update t set a = 1 where b = 1 and c > 1") - tk.MustExec("update t set a = 1 where b = 1 and c > 1") - tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") - tk.MustExec("insert into t1 values(1,1,1)") - tk.MustExec("insert into t1 values(1,1,1)") - tk.MustExec("replace into t1 values(1,1,1)") - tk.MustExec("replace into t1 values(1,1,1)") - tk.MustExec("admin capture bindings") - rows = tk.MustQuery("show global bindings").Sort().Rows() - c.Assert(len(rows), Equals, 4) - c.Assert(rows[0][0], Equals, "delete from `test` . `t` where `b` = ? and `c` > ?") - c.Assert(rows[0][1], Equals, "DELETE /*+ use_index(@`del_1` `test`.`t` `idx_b`)*/ FROM `test`.`t` WHERE `b` = 1 AND `c` > 1") - c.Assert(rows[1][0], Equals, "insert into `test` . `t1` select * from `test` . `t` where `t` . `b` = ? and `t` . `c` > ?") - c.Assert(rows[1][1], Equals, "INSERT INTO `test`.`t1` SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_b`)*/ * FROM `test`.`t` WHERE `t`.`b` = 1 AND `t`.`c` > 1") - c.Assert(rows[2][0], Equals, "replace into `test` . `t1` select * from `test` . `t` where `t` . `b` = ? and `t` . 
`c` > ?") - c.Assert(rows[2][1], Equals, "REPLACE INTO `test`.`t1` SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_b`)*/ * FROM `test`.`t` WHERE `t`.`b` = 1 AND `t`.`c` > 1") - c.Assert(rows[3][0], Equals, "update `test` . `t` set `a` = ? where `b` = ? and `c` > ?") - c.Assert(rows[3][1], Equals, "UPDATE /*+ use_index(@`upd_1` `test`.`t` `idx_b`)*/ `test`.`t` SET `a`=1 WHERE `b` = 1 AND `c` > 1") -} + store, clean := testkit.CreateMockStore(t) + defer clean() -func (s *testSuite) TestCapturePlanBaseline(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec(" set @@tidb_capture_plan_baselines = on") - defer func() { - tk.MustExec(" set @@tidb_capture_plan_baselines = off") - }() - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int)") - s.domain.BindHandle().CaptureBaselines() - tk.MustQuery("show global bindings").Check(testkit.Rows()) - tk.MustExec("select count(*) from t where a > 10") - tk.MustExec("select count(*) from t where a > 10") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 0) - - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select * from t where a > 10") - tk.MustExec("select * from t where a > 10") - tk.MustExec("admin capture bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . 
`t` where `a` > ?") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` > 10") -} - -func (s *testSuite) TestCaptureDBCaseSensitivity(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec("drop database if exists SPM") - tk.MustExec("create database SPM") - tk.MustExec("use SPM") - tk.MustExec("create table t(a int, b int, key(b))") - tk.MustExec("create global binding for select * from t using select /*+ use_index(t) */ * from t") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select /*+ use_index(t,b) */ * from t") - tk.MustExec("select /*+ use_index(t,b) */ * from t") - tk.MustExec("admin capture bindings") - // The capture should ignore the case sensitivity for DB name when checking if any binding exists, - // so there would be no new binding captured. - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(`t` )*/ * FROM `SPM`.`t`") - c.Assert(rows[0][8], Equals, "manual") -} - -func (s *testSuite) TestBaselineDBLowerCase(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec("drop database if exists SPM") - tk.MustExec("create database SPM") - tk.MustExec("use SPM") - tk.MustExec("create table t(a int, b int)") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("update t set a = a + 1") - tk.MustExec("update t set a = a + 1") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "update `spm` . `t` set `a` = `a` + ?") - // default_db should have lower case. 
- c.Assert(rows[0][2], Equals, "spm") - tk.MustExec("drop global binding for update t set a = a + 1") - rows = tk.MustQuery("show global bindings").Rows() - // DROP GLOBAL BINGING should remove the binding even if we are in SPM database. - c.Assert(len(rows), Equals, 0) - - tk.MustExec("create global binding for select * from t using select * from t") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `spm` . `t`") - // default_db should have lower case. - c.Assert(rows[0][2], Equals, "spm") - tk.MustExec("drop global binding for select * from t") - rows = tk.MustQuery("show global bindings").Rows() - // DROP GLOBAL BINGING should remove the binding even if we are in SPM database. - c.Assert(len(rows), Equals, 0) - - tk.MustExec("create session binding for select * from t using select * from t") - rows = tk.MustQuery("show session bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `spm` . `t`") - // default_db should have lower case. - c.Assert(rows[0][2], Equals, "spm") - tk.MustExec("drop session binding for select * from t") - rows = tk.MustQuery("show session bindings").Rows() - // DROP SESSION BINGING should remove the binding even if we are in SPM database. - c.Assert(len(rows), Equals, 0) - - s.cleanBindingEnv(tk) - // Simulate existing bindings with upper case default_db. - tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t`', 'select * from `spm` . `t`', 'SPM', 'using', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + - bindinfo.Manual + "')") - tk.MustQuery("select original_sql, default_db from mysql.bind_info where original_sql = 'select * from `spm` . `t`'").Check(testkit.Rows( - "select * from `spm` . `t` SPM", - )) - tk.MustExec("admin reload bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `spm` . 
`t`") - // default_db should have lower case. - c.Assert(rows[0][2], Equals, "spm") - tk.MustExec("drop global binding for select * from t") - rows = tk.MustQuery("show global bindings").Rows() - // DROP GLOBAL BINGING should remove the binding even if we are in SPM database. - c.Assert(len(rows), Equals, 0) - - s.cleanBindingEnv(tk) - // Simulate existing bindings with upper case default_db. - tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t`', 'select * from `spm` . `t`', 'SPM', 'using', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + - bindinfo.Manual + "')") - tk.MustQuery("select original_sql, default_db from mysql.bind_info where original_sql = 'select * from `spm` . `t`'").Check(testkit.Rows( - "select * from `spm` . `t` SPM", - )) - tk.MustExec("admin reload bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `spm` . `t`") - // default_db should have lower case. - c.Assert(rows[0][2], Equals, "spm") - tk.MustExec("create global binding for select * from t using select * from t") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `spm` . `t`") - // default_db should have lower case. - c.Assert(rows[0][2], Equals, "spm") - tk.MustQuery("select original_sql, default_db, status from mysql.bind_info where original_sql = 'select * from `spm` . `t`'").Check(testkit.Rows( - "select * from `spm` . 
`t` spm using", - )) -} - -func (s *testSuite) TestCaptureBaselinesDefaultDB(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec(" set @@tidb_capture_plan_baselines = on") - defer func() { - tk.MustExec(" set @@tidb_capture_plan_baselines = off") - }() - tk.MustExec("use test") - tk.MustExec("drop database if exists spm") - tk.MustExec("create database spm") - tk.MustExec("create table spm.t(a int, index idx_a(a))") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select * from spm.t ignore index(idx_a) where a > 10") - tk.MustExec("select * from spm.t ignore index(idx_a) where a > 10") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - // Default DB should be "" when all columns have explicit database name. - c.Assert(rows[0][2], Equals, "") - c.Assert(rows[0][3], Equals, "using") - tk.MustExec("use spm") - tk.MustExec("select * from spm.t where a > 10") - // Should use TableScan because of the "ignore index" binding. - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 0) -} - -func (s *testSuite) TestCapturePreparedStmt(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, c int, key idx_b(b), key idx_c(c))") - c.Assert(tk.MustUseIndex("select * from t where b = 1 and c > 1", "idx_b(b)"), IsTrue) - tk.MustExec("prepare stmt from 'select /*+ use_index(t,idx_c) */ * from t where b = ? 
and c > ?'") - tk.MustExec("set @p = 1") - tk.MustExec("execute stmt using @p, @p") - tk.MustExec("execute stmt using @p, @p") - - tk.MustQuery("show global bindings").Check(testkit.Rows()) - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t` where `b` = ? and `c` > ?") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`)*/ * FROM `test`.`t` WHERE `b` = ? AND `c` > ?") - - c.Assert(tk.MustUseIndex("select /*+ use_index(t,idx_b) */ * from t where b = 1 and c > 1", "idx_c(c)"), IsTrue) - tk.MustExec("admin flush bindings") - tk.MustExec("admin evolve bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t` where `b` = ? and `c` > ?") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`)*/ * FROM `test`.`t` WHERE `b` = ? AND `c` > ?") -} - -func (s *testSuite) TestDropSingleBindings(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b))") - - // Test drop session bindings. - tk.MustExec("create binding for select * from t using select * from t use index(idx_a)") - tk.MustExec("create binding for select * from t using select * from t use index(idx_b)") - rows := tk.MustQuery("show bindings").Rows() - // The size of bindings is equal to one. Because for one normalized sql, - // the `create binding` clears all the origin bindings. 
- c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][1], Equals, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)") - tk.MustExec("drop binding for select * from t using select * from t use index(idx_a)") - rows = tk.MustQuery("show bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][1], Equals, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)") - tk.MustExec("drop table t") - tk.MustExec("drop binding for select * from t using select * from t use index(idx_b)") - rows = tk.MustQuery("show bindings").Rows() - c.Assert(len(rows), Equals, 0) - - tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b))") - // Test drop global bindings. - tk.MustExec("create global binding for select * from t using select * from t use index(idx_a)") - tk.MustExec("create global binding for select * from t using select * from t use index(idx_b)") - rows = tk.MustQuery("show global bindings").Rows() - // The size of bindings is equal to one. Because for one normalized sql, - // the `create binding` clears all the origin bindings. 
- c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][1], Equals, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)") - tk.MustExec("drop global binding for select * from t using select * from t use index(idx_a)") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][1], Equals, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)") - tk.MustExec("drop table t") - tk.MustExec("drop global binding for select * from t using select * from t use index(idx_b)") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 0) -} - -func (s *testSuite) TestDMLEvolveBaselines(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, c int, index idx_b(b), index idx_c(c))") @@ -1177,57 +572,65 @@ func (s *testSuite) TestDMLEvolveBaselines(c *C) { tk.MustExec("create global binding for delete from t where b = 1 and c > 1 using delete /*+ use_index(t,idx_c) */ from t where b = 1 and c > 1") rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) + require.Len(t, rows, 1) tk.MustExec("delete /*+ use_index(t,idx_b) */ from t where b = 2 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) tk.MustExec("admin flush bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) + require.Len(t, rows, 1) tk.MustExec("admin evolve bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) + require.Len(t, rows, 1) tk.MustExec("create global binding for update t set a = 1 where b = 1 and c > 1 using update /*+ use_index(t,idx_c) */ t set a = 1 where b = 1 and c > 1") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) + 
require.Len(t, rows, 2) tk.MustExec("update /*+ use_index(t,idx_b) */ t set a = 2 where b = 2 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) tk.MustExec("admin flush bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) + require.Len(t, rows, 2) tk.MustExec("admin evolve bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) + require.Len(t, rows, 2) tk.MustExec("create table t1 like t") tk.MustExec("create global binding for insert into t1 select * from t where t.b = 1 and t.c > 1 using insert into t1 select /*+ use_index(t,idx_c) */ * from t where t.b = 1 and t.c > 1") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 3) + require.Len(t, rows, 3) tk.MustExec("insert into t1 select /*+ use_index(t,idx_b) */ * from t where t.b = 2 and t.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) tk.MustExec("admin flush bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 3) + require.Len(t, rows, 3) tk.MustExec("admin evolve bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 3) + require.Len(t, rows, 3) tk.MustExec("create global binding for replace into t1 select * from t where t.b = 1 and t.c > 1 using replace into t1 select /*+ use_index(t,idx_c) */ * from t where t.b = 1 and t.c > 1") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 4) + require.Len(t, rows, 4) tk.MustExec("replace into t1 select /*+ use_index(t,idx_b) */ * from t where t.b = 2 and t.c > 2") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) 
tk.MustExec("admin flush bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 4) + require.Len(t, rows, 4) tk.MustExec("admin evolve bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 4) + require.Len(t, rows, 4) } -func (s *testSuite) TestAddEvolveTasks(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestAddEvolveTasks(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b), index idx_c(c))") @@ -1237,70 +640,49 @@ func (s *testSuite) TestAddEvolveTasks(c *C) { tk.MustExec("set @@tidb_evolve_plan_baselines=1") // It cannot choose table path although it has lowest cost. 
tk.MustQuery("select * from t where a >= 4 and b >= 1 and c = 0") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_a") + require.Equal(t, "t:idx_a", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) tk.MustExec("admin flush bindings") rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0") - c.Assert(rows[1][3], Equals, "pending verify") + require.Len(t, rows, 2) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0", rows[0][1]) + require.Equal(t, "pending verify", rows[0][3]) tk.MustExec("admin evolve bindings") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0") - status := rows[1][3].(string) - c.Assert(status == "using" || status == "rejected", IsTrue) + require.Len(t, rows, 2) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0", rows[0][1]) + status := rows[0][3].(string) + require.True(t, status == bindinfo.Enabled || status == bindinfo.Rejected) } -func (s *testSuite) TestRuntimeHintsInEvolveTasks(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestRuntimeHintsInEvolveTasks(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("set @@tidb_evolve_plan_baselines=1") tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b), index 
idx_c(c))") - // these runtime hints which don't be contained by the original binding should be ignored tk.MustExec("create global binding for select * from t where a >= 1 and b >= 1 and c = 0 using select * from t use index(idx_a) where a >= 1 and b >= 1 and c = 0") - tk.MustQuery("select /*+ MAX_EXECUTION_TIME(5000) */* from t where a >= 4 and b >= 1 and c = 0") + tk.MustQuery("select /*+ MAX_EXECUTION_TIME(5000) */ * from t where a >= 4 and b >= 1 and c = 0") tk.MustExec("admin flush bindings") rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`)*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0") // MAX_EXECUTION_TIME is ignored - - s.cleanBindingEnv(tk) - tk.MustExec("create global binding for select * from t where a >= 1 and b >= 1 and c = 0 using select /*+ MAX_EXECUTION_TIME(5000) */* from t use index(idx_a) where a >= 1 and b >= 1 and c = 0") - tk.MustQuery("select /*+ MAX_EXECUTION_TIME(5000) */* from t where a >= 4 and b >= 1 and c = 0") - tk.MustExec("admin flush bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`), max_execution_time(5000)*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0") + require.Len(t, rows, 2) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`), max_execution_time(5000)*/ * FROM `test`.`t` WHERE `a` >= 4 AND `b` >= 1 AND `c` = 0", rows[0][1]) } -func (s *testSuite) TestBindingCache(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustExec("create global binding for select * from t using select * from t use index(idx);") - tk.MustExec("create database tmp") - tk.MustExec("use tmp") - tk.MustExec("create 
table t(a int, b int, index idx(a))") - tk.MustExec("create global binding for select * from t using select * from t use index(idx);") - - c.Assert(s.domain.BindHandle().Update(false), IsNil) - c.Assert(s.domain.BindHandle().Update(false), IsNil) - res := tk.MustQuery("show global bindings") - c.Assert(len(res.Rows()), Equals, 2) +func TestDefaultSessionVars(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() - tk.MustExec("drop global binding for select * from t;") - c.Assert(s.domain.BindHandle().Update(false), IsNil) - c.Assert(len(s.domain.BindHandle().GetAllBindRecord()), Equals, 1) -} - -func (s *testSuite) TestDefaultSessionVars(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustQuery(`show variables like "%baselines%"`).Sort().Check(testkit.Rows( "tidb_capture_plan_baselines OFF", "tidb_evolve_plan_baselines OFF", @@ -1311,338 +693,197 @@ func (s *testSuite) TestDefaultSessionVars(c *C) { "tidb_use_plan_baselines ON")) } -func (s *testSuite) TestCaptureBaselinesScope(c *C) { - tk1 := testkit.NewTestKit(c, s.store) - tk2 := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk1) +func TestCaptureBaselinesScope(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk1 := testkit.NewTestKit(t, store) + tk2 := testkit.NewTestKit(t, store) + + utilCleanBindingEnv(tk1, dom) tk1.MustQuery(`show session variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( "tidb_capture_plan_baselines OFF", )) tk1.MustQuery(`show global variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( "tidb_capture_plan_baselines OFF", )) - tk1.MustQuery(`select @@session.tidb_capture_plan_baselines`).Check(testkit.Rows( - "0", - )) tk1.MustQuery(`select @@global.tidb_capture_plan_baselines`).Check(testkit.Rows( "0", )) - tk1.MustExec("set @@session.tidb_capture_plan_baselines = on") + tk1.MustExec("SET GLOBAL tidb_capture_plan_baselines = 
on") defer func() { - tk1.MustExec(" set @@session.tidb_capture_plan_baselines = off") + tk1.MustExec(" set GLOBAL tidb_capture_plan_baselines = off") }() - tk1.MustQuery(`show session variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( + + tk1.MustQuery(`show variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( "tidb_capture_plan_baselines ON", )) tk1.MustQuery(`show global variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( - "tidb_capture_plan_baselines OFF", - )) - tk1.MustQuery(`select @@session.tidb_capture_plan_baselines`).Check(testkit.Rows( - "1", - )) - tk1.MustQuery(`select @@global.tidb_capture_plan_baselines`).Check(testkit.Rows( - "0", - )) - tk2.MustQuery(`show session variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( "tidb_capture_plan_baselines ON", )) tk2.MustQuery(`show global variables like "tidb_capture_plan_baselines"`).Check(testkit.Rows( - "tidb_capture_plan_baselines OFF", - )) - tk2.MustQuery(`select @@session.tidb_capture_plan_baselines`).Check(testkit.Rows( - "1", + "tidb_capture_plan_baselines ON", )) tk2.MustQuery(`select @@global.tidb_capture_plan_baselines`).Check(testkit.Rows( - "0", + "1", )) } -func (s *testSuite) TestDuplicateBindings(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustExec("create global binding for select * from t using select * from t use index(idx);") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - createTime := rows[0][4] - time.Sleep(1000000) - tk.MustExec("create global binding for select * from t using select * from t use index(idx);") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(createTime == rows[0][4], Equals, false) - - tk.MustExec("create session binding for select * from t using select * from t 
use index(idx);") - rows = tk.MustQuery("show session bindings").Rows() - c.Assert(len(rows), Equals, 1) - createTime = rows[0][4] - time.Sleep(1000000) - tk.MustExec("create session binding for select * from t using select * from t use index(idx);") - rows = tk.MustQuery("show session bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(createTime == rows[0][4], Equals, false) -} +func TestStmtHints(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() -func (s *testSuite) TestStmtHints(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, index idx(a))") tk.MustExec("create global binding for select * from t using select /*+ MAX_EXECUTION_TIME(100), MEMORY_QUOTA(1 GB) */ * from t use index(idx)") tk.MustQuery("select * from t") - c.Assert(tk.Se.GetSessionVars().StmtCtx.MemQuotaQuery, Equals, int64(1073741824)) - c.Assert(tk.Se.GetSessionVars().StmtCtx.MaxExecutionTime, Equals, uint64(100)) + require.Equal(t, int64(1073741824), tk.Session().GetSessionVars().StmtCtx.MemQuotaQuery) + require.Equal(t, uint64(100), tk.Session().GetSessionVars().StmtCtx.MaxExecutionTime) tk.MustQuery("select a, b from t") - c.Assert(tk.Se.GetSessionVars().StmtCtx.MemQuotaQuery, Equals, int64(0)) - c.Assert(tk.Se.GetSessionVars().StmtCtx.MaxExecutionTime, Equals, uint64(0)) -} - -func (s *testSuite) TestReloadBindings(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustExec("create global binding for select * from t using select * from t use index(idx)") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - rows = tk.MustQuery("select * from mysql.bind_info").Rows() - c.Assert(len(rows), Equals, 1) - 
tk.MustExec("truncate table mysql.bind_info") - c.Assert(s.domain.BindHandle().Update(false), IsNil) - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(s.domain.BindHandle().Update(true), IsNil) - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - tk.MustExec("admin reload bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 0) + require.Equal(t, int64(0), tk.Session().GetSessionVars().StmtCtx.MemQuotaQuery) + require.Equal(t, uint64(0), tk.Session().GetSessionVars().StmtCtx.MaxExecutionTime) } -func (s *testSuite) TestDefaultDB(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustExec("create global binding for select * from test.t using select * from test.t use index(idx)") - tk.MustExec("use mysql") - tk.MustQuery("select * from test.t") - // Even in another database, we could still use the bindings. - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx") - tk.MustExec("drop global binding for select * from test.t") - tk.MustQuery("show global bindings").Check(testkit.Rows()) +func TestPrivileges(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() - tk.MustExec("use test") - tk.MustExec("create session binding for select * from test.t using select * from test.t use index(idx)") - tk.MustExec("use mysql") - tk.MustQuery("select * from test.t") - // Even in another database, we could still use the bindings. 
- c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx") - tk.MustExec("drop session binding for select * from test.t") - tk.MustQuery("show session bindings").Check(testkit.Rows()) -} - -func (s *testSuite) TestEvolveInvalidBindings(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, index idx_a(a))") - tk.MustExec("create global binding for select * from t where a > 10 using select /*+ USE_INDEX(t) */ * from t where a > 10") - // Manufacture a rejected binding by hacking mysql.bind_info. - tk.MustExec("insert into mysql.bind_info values('select * from test . t where a > ?', 'SELECT /*+ USE_INDEX(t,idx_a) */ * FROM test.t WHERE a > 10', 'test', 'rejected', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + - bindinfo.Manual + "')") - tk.MustQuery("select bind_sql, status from mysql.bind_info where source != 'builtin'").Sort().Check(testkit.Rows( - "SELECT /*+ USE_INDEX(`t` )*/ * FROM `test`.`t` WHERE `a` > 10 using", - "SELECT /*+ USE_INDEX(t,idx_a) */ * FROM test.t WHERE a > 10 rejected", - )) - // Reload cache from mysql.bind_info. - s.domain.BindHandle().Clear() - c.Assert(s.domain.BindHandle().Update(true), IsNil) - - tk.MustExec("alter table t drop index idx_a") - tk.MustExec("admin evolve bindings") - c.Assert(s.domain.BindHandle().Update(false), IsNil) - rows := tk.MustQuery("show global bindings").Sort().Rows() - c.Assert(len(rows), Equals, 2) - // Make sure this "using" binding is not overrided. 
- c.Assert(rows[0][1], Equals, "SELECT /*+ USE_INDEX(`t` )*/ * FROM `test`.`t` WHERE `a` > 10") - status := rows[0][3].(string) - c.Assert(status == "using", IsTrue) - c.Assert(rows[1][1], Equals, "SELECT /*+ USE_INDEX(t,idx_a) */ * FROM test.t WHERE a > 10") - status = rows[1][3].(string) - c.Assert(status == "using" || status == "rejected", IsTrue) -} - -func (s *testSuite) TestOutdatedInfoSchema(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustExec("create global binding for select * from t using select * from t use index(idx)") - c.Assert(s.domain.BindHandle().Update(false), IsNil) - tk.MustExec("truncate table mysql.bind_info") - tk.MustExec("create global binding for select * from t using select * from t use index(idx)") -} - -func (s *testSuite) TestPrivileges(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, index idx(a))") tk.MustExec("create global binding for select * from t using select * from t use index(idx)") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) + require.Len(t, rows, 1) tk.MustExec("create user test@'%'") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "test", Hostname: "%"}, nil, nil), IsTrue) + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "test", Hostname: "%"}, nil, nil)) rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 0) + require.Len(t, rows, 0) } -func (s *testSuite) TestHintsSetEvolveTask(c *C) { - tk := testkit.NewTestKit(c, s.store) - 
s.cleanBindingEnv(tk) +func TestHintsSetEvolveTask(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, index idx_a(a))") tk.MustExec("create global binding for select * from t where a > 10 using select * from t ignore index(idx_a) where a > 10") tk.MustExec("set @@tidb_evolve_plan_baselines=1") tk.MustQuery("select * from t use index(idx_a) where a > 0") - bindHandle := s.domain.BindHandle() + bindHandle := dom.BindHandle() bindHandle.SaveEvolveTasksToStore() // Verify the added Binding for evolution contains valid ID and Hint, otherwise, panic may happen. - sql, hash := normalizeWithDefaultDB(c, "select * from t where a > ?", "test") + sql, hash := utilNormalizeWithDefaultDB(t, "select * from t where a > ?", "test") bindData := bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 2) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 2) bind := bindData.Bindings[1] - c.Assert(bind.Status, Equals, bindinfo.PendingVerify) - c.Assert(bind.ID, Not(Equals), "") - c.Assert(bind.Hint, NotNil) + require.Equal(t, bindinfo.PendingVerify, bind.Status) + require.NotEqual(t, "", bind.ID) + require.NotNil(t, bind.Hint) } -func (s *testSuite) TestHintsSetID(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestHintsSetID(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, index idx_a(a))") tk.MustExec("create global binding for select * from t where a > 10 using select /*+ use_index(test.t, idx_a) */ * from t where a > 10") - bindHandle := s.domain.BindHandle() + bindHandle := dom.BindHandle() // Verify the added Binding contains ID with restored query block. - sql, hash := normalizeWithDefaultDB(c, "select * from t where a > ?", "test") + sql, hash := utilNormalizeWithDefaultDB(t, "select * from t where a > ?", "test") bindData := bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) bind := bindData.Bindings[0] - c.Assert(bind.ID, Equals, "use_index(@`sel_1` `test`.`t` `idx_a`)") + require.Equal(t, "use_index(@`sel_1` `test`.`t` `idx_a`)", bind.ID) - s.cleanBindingEnv(tk) + utilCleanBindingEnv(tk, dom) tk.MustExec("create global binding for select * from t where a > 10 using select /*+ use_index(t, idx_a) */ * from t where a > 10") bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) bind = bindData.Bindings[0] - c.Assert(bind.ID, Equals, "use_index(@`sel_1` `test`.`t` `idx_a`)") + require.Equal(t, "use_index(@`sel_1` `test`.`t` `idx_a`)", bind.ID) - s.cleanBindingEnv(tk) + utilCleanBindingEnv(tk, dom) tk.MustExec("create global binding for select * from t where a > 10 using select /*+ use_index(@sel_1 t, idx_a) */ * from t where a > 10") bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) bind = bindData.Bindings[0] - c.Assert(bind.ID, Equals, "use_index(@`sel_1` `test`.`t` `idx_a`)") + require.Equal(t, "use_index(@`sel_1` `test`.`t` `idx_a`)", bind.ID) - s.cleanBindingEnv(tk) + utilCleanBindingEnv(tk, dom) tk.MustExec("create global binding for select * from t where a > 10 using select /*+ use_index(@qb1 t, idx_a) qb_name(qb1) */ * from t where a > 10") bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) bind = bindData.Bindings[0] - c.Assert(bind.ID, Equals, "use_index(@`sel_1` `test`.`t` `idx_a`)") + require.Equal(t, "use_index(@`sel_1` `test`.`t` `idx_a`)", bind.ID) - s.cleanBindingEnv(tk) + utilCleanBindingEnv(tk, dom) tk.MustExec("create global binding for select * from t where a > 10 using select /*+ use_index(T, IDX_A) */ * from t where a > 10") bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) bind = bindData.Bindings[0] - c.Assert(bind.ID, Equals, "use_index(@`sel_1` `test`.`t` `idx_a`)") + require.Equal(t, "use_index(@`sel_1` `test`.`t` `idx_a`)", bind.ID) - s.cleanBindingEnv(tk) + utilCleanBindingEnv(tk, dom) err := tk.ExecToErr("create global binding for select * from t using select /*+ non_exist_hint() */ * from t") - c.Assert(terror.ErrorEqual(err, parser.ErrWarnOptimizerHintParseError), IsTrue) + require.True(t, terror.ErrorEqual(err, parser.ErrParse)) tk.MustExec("create global binding for select * from t where a > 10 using select * from t where a > 10") bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) bind = bindData.Bindings[0] - c.Assert(bind.ID, Equals, "") + require.Equal(t, "", bind.ID) } -func (s *testSuite) TestCapturePlanBaselineIgnoreTiFlash(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int, key(a), key(b))") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select * from t") - tk.MustExec("select * from t") - // Create virtual tiflash replica info. - dom := domain.GetDomain(tk.Se) - is := dom.InfoSchema() - db, exists := is.SchemaByName(model.NewCIStr("test")) - c.Assert(exists, IsTrue) - for _, tblInfo := range db.Tables { - if tblInfo.Name.L == "t" { - tblInfo.TiFlashReplica = &model.TiFlashReplicaInfo{ - Count: 1, - Available: true, - } - } - } - // Here the plan is the TiFlash plan. 
- rows := tk.MustQuery("explain select * from t").Rows() - c.Assert(fmt.Sprintf("%v", rows[len(rows)-1][2]), Equals, "cop[tiflash]") +func TestNotEvolvePlanForReadStorageHint(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() - tk.MustQuery("show global bindings").Check(testkit.Rows()) - tk.MustExec("admin capture bindings") - // Don't have the TiFlash plan even we have TiFlash replica. - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t`") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t`") -} + store, clean := testkit.CreateMockStore(t) + defer clean() -func (s *testSuite) TestNotEvolvePlanForReadStorageHint(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, index idx_a(a), index idx_b(b))") tk.MustExec("insert into t values (1,1), (2,2), (3,3), (4,4), (5,5), (6,6), (7,7), (8,8), (9,9), (10,10)") tk.MustExec("analyze table t") // Create virtual tiflash replica info. - dom := domain.GetDomain(tk.Se) + dom := domain.GetDomain(tk.Session()) is := dom.InfoSchema() db, exists := is.SchemaByName(model.NewCIStr("test")) - c.Assert(exists, IsTrue) + require.True(t, exists) for _, tblInfo := range db.Tables { if tblInfo.Name.L == "t" { tblInfo.TiFlashReplica = &model.TiFlashReplicaInfo{ @@ -1655,36 +896,38 @@ func (s *testSuite) TestNotEvolvePlanForReadStorageHint(c *C) { // Make sure the best plan of the SQL is use TiKV index. 
tk.MustExec("set @@session.tidb_executor_concurrency = 4;") rows := tk.MustQuery("explain select * from t where a >= 11 and b >= 11").Rows() - c.Assert(fmt.Sprintf("%v", rows[len(rows)-1][2]), Equals, "cop[tikv]") + require.Equal(t, "cop[tikv]", fmt.Sprintf("%v", rows[len(rows)-1][2])) tk.MustExec("create global binding for select * from t where a >= 1 and b >= 1 using select /*+ read_from_storage(tiflash[t]) */ * from t where a >= 1 and b >= 1") tk.MustExec("set @@tidb_evolve_plan_baselines=1") // Even if index of TiKV has lower cost, it chooses TiFlash. rows = tk.MustQuery("explain select * from t where a >= 11 and b >= 11").Rows() - c.Assert(fmt.Sprintf("%v", rows[len(rows)-1][2]), Equals, "cop[tiflash]") + require.Equal(t, "cop[tiflash]", fmt.Sprintf("%v", rows[len(rows)-1][2])) tk.MustExec("admin flush bindings") rows = tk.MustQuery("show global bindings").Rows() // None evolve task, because of the origin binding is a read_from_storage binding. - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][1], Equals, "SELECT /*+ read_from_storage(tiflash[`t`])*/ * FROM `test`.`t` WHERE `a` >= 1 AND `b` >= 1") - c.Assert(rows[0][3], Equals, "using") + require.Len(t, rows, 1) + require.Equal(t, "SELECT /*+ read_from_storage(tiflash[`t`])*/ * FROM `test`.`t` WHERE `a` >= 1 AND `b` >= 1", rows[0][1]) + require.Equal(t, bindinfo.Enabled, rows[0][3]) } -func (s *testSuite) TestBindingWithIsolationRead(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestBindingWithIsolationRead(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, index idx_a(a), index idx_b(b))") tk.MustExec("insert into t values (1,1), (2,2), (3,3), (4,4), (5,5), (6,6), (7,7), (8,8), (9,9), (10,10)") tk.MustExec("analyze table t") // Create virtual tiflash replica info. 
- dom := domain.GetDomain(tk.Se) + dom := domain.GetDomain(tk.Session()) is := dom.InfoSchema() db, exists := is.SchemaByName(model.NewCIStr("test")) - c.Assert(exists, IsTrue) + require.True(t, exists) for _, tblInfo := range db.Tables { if tblInfo.Name.L == "t" { tblInfo.TiFlashReplica = &model.TiFlashReplicaInfo{ @@ -1696,16 +939,24 @@ func (s *testSuite) TestBindingWithIsolationRead(c *C) { tk.MustExec("create global binding for select * from t where a >= 1 and b >= 1 using select * from t use index(idx_a) where a >= 1 and b >= 1") tk.MustExec("set @@tidb_use_plan_baselines = 1") rows := tk.MustQuery("explain select * from t where a >= 11 and b >= 11").Rows() - c.Assert(rows[len(rows)-1][2], Equals, "cop[tikv]") + require.Equal(t, "cop[tikv]", rows[len(rows)-1][2]) // Even if we build a binding use index for SQL, but after we set the isolation read for TiFlash, it choose TiFlash instead of index of TiKV. tk.MustExec("set @@tidb_isolation_read_engines = \"tiflash\"") rows = tk.MustQuery("explain select * from t where a >= 11 and b >= 11").Rows() - c.Assert(rows[len(rows)-1][2], Equals, "cop[tiflash]") + require.Equal(t, "cop[tiflash]", rows[len(rows)-1][2]) } -func (s *testSuite) TestReCreateBindAfterEvolvePlan(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestReCreateBindAfterEvolvePlan(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b), index idx_c(c))") @@ -1716,24 +967,26 @@ func (s *testSuite) TestReCreateBindAfterEvolvePlan(c *C) { // It cannot choose table path although it has lowest cost. 
tk.MustQuery("select * from t where a >= 0 and b >= 0") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_a") + require.Equal(t, "t:idx_a", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) tk.MustExec("admin flush bindings") rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` >= 0 AND `b` >= 0") - c.Assert(rows[1][3], Equals, "pending verify") + require.Len(t, rows, 2) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` >= 0 AND `b` >= 0", rows[0][1]) + require.Equal(t, "pending verify", rows[0][3]) tk.MustExec("create global binding for select * from t where a >= 1 and b >= 1 using select * from t use index(idx_b) where a >= 1 and b >= 1") rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) + require.Len(t, rows, 1) tk.MustQuery("select * from t where a >= 4 and b >= 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_b") + require.Equal(t, "t:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) } -func (s *testSuite) TestInvisibleIndex(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestInvisibleIndex(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, unique idx_a(a), index idx_b(b) invisible)") @@ -1745,322 +998,144 @@ func (s *testSuite) TestInvisibleIndex(c *C) { tk.MustExec("create global binding for select * from t using select * from t use index(idx_a) ") tk.MustQuery("select * from t") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_a") - c.Assert(tk.MustUseIndex("select * from t", "idx_a(a)"), IsTrue) + require.Equal(t, "t:idx_a", 
tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("select * from t", "idx_a(a)")) tk.MustExec(`prepare stmt1 from 'select * from t'`) tk.MustExec("execute stmt1") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 1) - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_a") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_a", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) // And then make this index invisible tk.MustExec("alter table t alter index idx_a invisible") tk.MustQuery("select * from t") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 0) + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 0) tk.MustExec("execute stmt1") - c.Assert(len(tk.Se.GetSessionVars().StmtCtx.IndexNames), Equals, 0) + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 0) tk.MustExec("drop binding for select * from t") } -func (s *testSuite) TestBindingSource(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, index idx_a(a))") - - // Test Source for SQL created sql - tk.MustExec("create global binding for select * from t where a > 10 using select * from t ignore index(idx_a) where a > 10") - bindHandle := s.domain.BindHandle() - sql, hash := normalizeWithDefaultDB(c, "select * from t where a > ?", "test") - bindData := bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . 
`t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 1) - bind := bindData.Bindings[0] - c.Assert(bind.Source, Equals, bindinfo.Manual) - - // Test Source for evolved sql - tk.MustExec("set @@tidb_evolve_plan_baselines=1") - tk.MustQuery("select * from t where a > 10") - bindHandle.SaveEvolveTasksToStore() - sql, hash = normalizeWithDefaultDB(c, "select * from t where a > ?", "test") - bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . `t` where `a` > ?") - c.Assert(len(bindData.Bindings), Equals, 2) - bind = bindData.Bindings[1] - c.Assert(bind.Source, Equals, bindinfo.Evolve) - tk.MustExec("set @@tidb_evolve_plan_baselines=0") - - // Test Source for captured sqls - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec("set @@tidb_capture_plan_baselines = on") - defer func() { - tk.MustExec("set @@tidb_capture_plan_baselines = off") - }() - tk.MustExec("use test") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select * from t ignore index(idx_a) where a < 10") - tk.MustExec("select * from t ignore index(idx_a) where a < 10") - tk.MustExec("admin capture bindings") - bindHandle.CaptureBaselines() - sql, hash = normalizeWithDefaultDB(c, "select * from t where a < ?", "test") - bindData = bindHandle.GetBindRecord(hash, sql, "test") - c.Check(bindData, NotNil) - c.Check(bindData.OriginalSQL, Equals, "select * from `test` . 
`t` where `a` < ?") - c.Assert(len(bindData.Bindings), Equals, 1) - bind = bindData.Bindings[0] - c.Assert(bind.Source, Equals, bindinfo.Capture) -} +func TestSPMHitInfo(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() -func (s *testSuite) TestSPMHitInfo(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t1") tk.MustExec("drop table if exists t2") tk.MustExec("create table t1(id int)") tk.MustExec("create table t2(id int)") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin"), IsTrue) - c.Assert(tk.HasPlan("SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + require.True(t, tk.HasPlan("SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id", "MergeJoin")) tk.MustExec("SELECT * from t1,t2 where t1.id = t2.id") tk.MustQuery(`select @@last_plan_from_binding;`).Check(testkit.Rows("0")) tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") - c.Assert(tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin"), IsTrue) + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin")) tk.MustExec("SELECT * from t1,t2 where t1.id = t2.id") tk.MustQuery(`select @@last_plan_from_binding;`).Check(testkit.Rows("1")) tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") } -func (s *testSuite) TestIssue19836(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, key (a));") - tk.MustExec("CREATE SESSION BINDING FOR select * from t where a = 1 limit 5, 5 USING select * from t ignore index (a) where a = 1 limit 5, 5;") - 
tk.MustExec("PREPARE stmt FROM 'select * from t where a = 40 limit ?, ?';") - tk.MustExec("set @a=1;") - tk.MustExec("set @b=2;") - tk.MustExec("EXECUTE stmt USING @a, @b;") - tk.Se.SetSessionManager(&mockSessionManager{ - PS: []*util.ProcessInfo{tk.Se.ShowProcess()}, - }) - explainResult := testkit.Rows( - "Limit_8 2.00 0 root time:0s, loops:0 offset:1, count:2 N/A N/A", - "└─TableReader_14 3.00 0 root time:0s, loops:0 data:Limit_13 N/A N/A", - " └─Limit_13 3.00 0 cop[tikv] offset:0, count:3 N/A N/A", - " └─Selection_12 3.00 0 cop[tikv] eq(test.t.a, 40) N/A N/A", - " └─TableFullScan_11 3000.00 0 cop[tikv] table:t keep order:false, stats:pseudo N/A N/A", - ) - tk.MustQuery("explain for connection " + strconv.FormatUint(tk.Se.ShowProcess().ID, 10)).Check(explainResult) -} +func TestReCreateBind(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() -func (s *testSuite) TestReCreateBind(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, index idx(a))") - tk.MustQuery("select * from mysql.bind_info").Check(testkit.Rows()) + tk.MustQuery("select * from mysql.bind_info where source != 'builtin'").Check(testkit.Rows()) tk.MustQuery("show global bindings").Check(testkit.Rows()) tk.MustExec("create global binding for select * from t using select * from t") - tk.MustQuery("select original_sql, status from mysql.bind_info").Check(testkit.Rows( - "select * from `test` . `t` using", + tk.MustQuery("select original_sql, status from mysql.bind_info where source != 'builtin';").Check(testkit.Rows( + "select * from `test` . `t` enabled", )) rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t`") - c.Assert(rows[0][3], Equals, "using") + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . 
`t`", rows[0][0]) + require.Equal(t, bindinfo.Enabled, rows[0][3]) tk.MustExec("create global binding for select * from t using select * from t") - tk.MustQuery("select original_sql, status from mysql.bind_info").Check(testkit.Rows( - "select * from `test` . `t` using", - )) rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t`") - c.Assert(rows[0][3], Equals, "using") -} + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t`", rows[0][0]) + require.Equal(t, bindinfo.Enabled, rows[0][3]) -func (s *testSuite) TestDMLIndexHintBind(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("create table t(a int, b int, c int, key idx_b(b), key idx_c(c))") - - tk.MustExec("delete from t where b = 1 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_b") - c.Assert(tk.MustUseIndex("delete from t where b = 1 and c > 1", "idx_b(b)"), IsTrue) - tk.MustExec("create global binding for delete from t where b = 1 and c > 1 using delete from t use index(idx_c) where b = 1 and c > 1") - tk.MustExec("delete from t where b = 1 and c > 1") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idx_c") - c.Assert(tk.MustUseIndex("delete from t where b = 1 and c > 1", "idx_c(c)"), IsTrue) + rows = tk.MustQuery("select original_sql, status from mysql.bind_info where source != 'builtin';").Rows() + require.Len(t, rows, 2) + require.Equal(t, "deleted", rows[0][1]) + require.Equal(t, bindinfo.Enabled, rows[1][1]) } -func (s *testSuite) TestCapturedBindingCharset(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("use test") - tk.MustExec("create table t(name varchar(25), index idx(name))") +func 
TestExplainShowBindSQL(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() - tk.MustExec("set character_set_connection = 'ascii'") - tk.MustExec("update t set name = 'hello' where name <= 'abc'") - tk.MustExec("update t set name = 'hello' where name <= 'abc'") - tk.MustExec("set character_set_connection = 'utf8mb4'") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "update `test` . `t` set `name` = ? where `name` <= ?") - c.Assert(rows[0][1], Equals, "UPDATE /*+ use_index(@`upd_1` `test`.`t` `idx`)*/ `test`.`t` SET `name`='hello' WHERE `name` <= 'abc'") - // Charset and Collation are empty now, they are not used currently. - c.Assert(rows[0][6], Equals, "") - c.Assert(rows[0][7], Equals, "") -} - -func (s *testSuite) TestConcurrentCapture(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - // Simulate an existing binding generated by concurrent CREATE BINDING, which has not been synchronized to current tidb-server yet. - // Actually, it is more common to be generated by concurrent baseline capture, I use Manual just for simpler test verification. - tk.MustExec("insert into mysql.bind_info values('select * from `test` . `t`', 'select * from `test` . `t`', '', 'using', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + - bindinfo.Manual + "')") - tk.MustQuery("select original_sql, source from mysql.bind_info where source != 'builtin'").Check(testkit.Rows( - "select * from `test` . 
`t` manual", - )) - stmtsummary.StmtSummaryByDigestMap.Clear() + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int, b int)") - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select * from t") - tk.MustExec("select * from t") - tk.MustExec("admin capture bindings") - tk.MustQuery("select original_sql, source from mysql.bind_info where source != 'builtin'").Check(testkit.Rows( - "select * from `test` . `t` capture", + tk.MustExec("create table t(a int, b int, key(a))") + + tk.MustExec("create global binding for select * from t using select * from t use index(a)") + tk.MustQuery("select original_sql, bind_sql from mysql.bind_info where default_db != 'mysql'").Check(testkit.Rows( + "select * from `test` . `t` SELECT * FROM `test`.`t` USE INDEX (`a`)", )) -} -func (s *testSuite) TestUpdateSubqueryCapture(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t1, t2") - tk.MustExec("create table t1(a int, b int, c int, key idx_b(b))") - tk.MustExec("create table t2(a int, b int)") - stmtsummary.StmtSummaryByDigestMap.Clear() - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("update t1 set b = 1 where b = 2 and (a in (select a from t2 where b = 1) or c in (select a from t2 where b = 1))") - tk.MustExec("update t1 set b = 1 where b = 2 and (a in (select a from t2 where b = 1) or c in (select a from t2 where b = 1))") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - bindSQL := "UPDATE /*+ use_index(@`upd_1` `test`.`t1` `idx_b`), use_index(@`sel_1` `test`.`t2` ), hash_join(@`upd_1` `test`.`t1`), use_index(@`sel_2` `test`.`t2` )*/ `test`.`t1` SET `b`=1 WHERE `b` = 2 AND (`a` IN (SELECT `a` FROM `test`.`t2` WHERE `b` = 1) OR 
`c` IN (SELECT `a` FROM `test`.`t2` WHERE `b` = 1))" - c.Assert(rows[0][1], Equals, bindSQL) - tk.MustExec(bindSQL) - c.Assert(tk.Se.GetSessionVars().StmtCtx.GetWarnings(), HasLen, 0) + tk.MustExec("explain format = 'verbose' select * from t") + tk.MustQuery("show warnings").Check(testkit.Rows("Note 1105 Using the bindSQL: SELECT * FROM `test`.`t` USE INDEX (`a`)")) + // explain analyze do not support verbose yet. } -func (s *testSuite) TestIssue20417(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestDMLIndexHintBind(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec(`CREATE TABLE t ( - pk VARBINARY(36) NOT NULL PRIMARY KEY, - b BIGINT NOT NULL, - c BIGINT NOT NULL, - pad VARBINARY(2048), - INDEX idxb(b), - INDEX idxc(c) - )`) - - // Test for create binding - s.cleanBindingEnv(tk) - tk.MustExec("create global binding for select * from t using select /*+ use_index(t, idxb) */ * from t") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . 
`t`") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(`t` `idxb`)*/ * FROM `test`.`t`") - c.Assert(tk.MustUseIndex("select * from t", "idxb(b)"), IsTrue) - c.Assert(tk.MustUseIndex("select * from test.t", "idxb(b)"), IsTrue) - - tk.MustExec("create global binding for select * from t WHERE b=2 AND c=3924541 using select /*+ use_index(@sel_1 test.t idxb) */ * from t WHERE b=2 AND c=3924541") - c.Assert(tk.MustUseIndex("SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`)*/ * FROM `test`.`t` WHERE `b`=2 AND `c`=3924541", "idxb(b)"), IsTrue) - c.Assert(tk.MustUseIndex("SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`)*/ * FROM `t` WHERE `b`=2 AND `c`=3924541", "idxb(b)"), IsTrue) - - // Test for capture baseline - s.cleanBindingEnv(tk) - stmtsummary.StmtSummaryByDigestMap.Clear() - tk.MustExec("set @@tidb_capture_plan_baselines = on") - s.domain.BindHandle().CaptureBaselines() - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("select * from t where b=2 and c=213124") - tk.MustExec("select * from t where b=2 and c=213124") - tk.MustExec("admin capture bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t` where `b` = ? and `c` = ?") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxb`)*/ * FROM `test`.`t` WHERE `b` = 2 AND `c` = 213124") - tk.MustExec("set @@tidb_capture_plan_baselines = off") + tk.MustExec("create table t(a int, b int, c int, key idx_b(b), key idx_c(c))") - // Test for evolve baseline - s.cleanBindingEnv(tk) - tk.MustExec("set @@tidb_evolve_plan_baselines=1") - tk.MustExec("create global binding for select * from t WHERE c=3924541 using select /*+ use_index(@sel_1 test.t idxb) */ * from t WHERE c=3924541") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . 
`t` where `c` = ?") - c.Assert(rows[0][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxb`)*/ * FROM `test`.`t` WHERE `c` = 3924541") - tk.MustExec("select /*+ use_index(t idxc)*/ * from t where c=3924541") - c.Assert(tk.Se.GetSessionVars().StmtCtx.IndexNames[0], Equals, "t:idxb") - tk.MustExec("admin flush bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][0], Equals, "select * from `test` . `t` where `c` = ?") - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`), use_index(`t` `idxc`)*/ * FROM `test`.`t` WHERE `c` = 3924541") - c.Assert(rows[1][3], Equals, "pending verify") - tk.MustExec("admin evolve bindings") - rows = tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 2) - c.Assert(rows[1][0], Equals, "select * from `test` . `t` where `c` = ?") - c.Assert(rows[1][1], Equals, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`), use_index(`t` `idxc`)*/ * FROM `test`.`t` WHERE `c` = 3924541") - status := rows[1][3].(string) - c.Assert(status == "using" || status == "rejected", IsTrue) - tk.MustExec("set @@tidb_evolve_plan_baselines=0") + tk.MustExec("delete from t where b = 1 and c > 1") + require.Equal(t, "t:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("delete from t where b = 1 and c > 1", "idx_b(b)")) + tk.MustExec("create global binding for delete from t where b = 1 and c > 1 using delete from t use index(idx_c) where b = 1 and c > 1") + tk.MustExec("delete from t where b = 1 and c > 1") + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + require.True(t, tk.MustUseIndex("delete from t where b = 1 and c > 1", "idx_c(c)")) } -func (s *testSuite) TestCaptureWithZeroSlowLogThreshold(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) - tk.MustExec("use test") - tk.MustExec("drop table if exists t") - tk.MustExec("create table t(a int)") - 
stmtsummary.StmtSummaryByDigestMap.Clear() - c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) - tk.MustExec("set tidb_slow_log_threshold = 0") - tk.MustExec("select * from t") - tk.MustExec("select * from t") - tk.MustExec("set tidb_slow_log_threshold = 300") - tk.MustExec("admin capture bindings") - rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t`") +func TestForbidEvolvePlanBaseLinesBeforeGA(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = false + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + err := tk.ExecToErr("set @@tidb_evolve_plan_baselines=0") + require.Equal(t, nil, err) + err = tk.ExecToErr("set @@TiDB_Evolve_pLan_baselines=1") + require.EqualError(t, err, "Cannot enable baseline evolution feature, it is not generally available now") + err = tk.ExecToErr("set @@TiDB_Evolve_pLan_baselines=oN") + require.EqualError(t, err, "Cannot enable baseline evolution feature, it is not generally available now") + err = tk.ExecToErr("admin evolve bindings") + require.EqualError(t, err, "Cannot enable baseline evolution feature, it is not generally available now") } -func (s *testSuite) TestExplainTableStmts(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestExplainTableStmts(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(id int, value decimal(5,2))") @@ -2069,48 +1144,102 @@ func (s *testSuite) TestExplainTableStmts(c *C) { tk.MustExec("desc table t") } -func (s *testSuite) TestSPMWithoutUseDatabase(c *C) { - tk := testkit.NewTestKit(c, s.store) - tk1 := testkit.NewTestKit(c, s.store) 
- s.cleanBindingEnv(tk) - s.cleanBindingEnv(tk1) +func TestSPMWithoutUseDatabase(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk1 := testkit.NewTestKit(t, store) + utilCleanBindingEnv(tk, dom) + utilCleanBindingEnv(tk1, dom) + tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, key(a))") tk.MustExec("create global binding for select * from t using select * from t force index(a)") err := tk1.ExecToErr("select * from t") - c.Assert(err, ErrorMatches, "*No database selected") + require.Error(t, err) + require.Regexp(t, "No database selected$", err) tk1.MustQuery(`select @@last_plan_from_binding;`).Check(testkit.Rows("0")) - c.Assert(tk1.MustUseIndex("select * from test.t", "a"), IsTrue) + require.True(t, tk1.MustUseIndex("select * from test.t", "a")) tk1.MustExec("select * from test.t") tk1.MustQuery(`select @@last_plan_from_binding;`).Check(testkit.Rows("1")) } -func (s *testSuite) TestBindingWithoutCharset(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestBindingWithoutCharset(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t (a varchar(10) CHARACTER SET utf8)") tk.MustExec("create global binding for select * from t where a = 'aa' using select * from t where a = 'aa'") rows := tk.MustQuery("show global bindings").Rows() - c.Assert(len(rows), Equals, 1) - c.Assert(rows[0][0], Equals, "select * from `test` . `t` where `a` = ?") - c.Assert(rows[0][1], Equals, "SELECT * FROM `test`.`t` WHERE `a` = 'aa'") + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . 
`t` where `a` = ?", rows[0][0]) + require.Equal(t, "SELECT * FROM `test`.`t` WHERE `a` = 'aa'", rows[0][1]) } -func (s *testSuite) TestTemporaryTable(c *C) { - tk := testkit.NewTestKit(c, s.store) - s.cleanBindingEnv(tk) +func TestBindingWithMultiParenthesis(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") - tk.MustExec("set tidb_enable_global_temporary_table = true") - tk.MustExec("create global temporary table t(a int, b int, key(a), key(b)) on commit delete rows") - tk.MustExec("create table t2(a int, b int, key(a), key(b))") - tk.MustGetErrCode("create session binding for select * from t where b = 123 using select * from t ignore index(b) where b = 123;", errno.ErrOptOnTemporaryTable) - tk.MustGetErrCode("create binding for insert into t select * from t2 where t2.b = 1 and t2.c > 1 using insert into t select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1", errno.ErrOptOnTemporaryTable) - tk.MustGetErrCode("create binding for replace into t select * from t2 where t2.b = 1 and t2.c > 1 using replace into t select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1", errno.ErrOptOnTemporaryTable) - tk.MustGetErrCode("create binding for update t set a = 1 where b = 1 and c > 1 using update /*+ use_index(t, c) */ t set a = 1 where b = 1 and c > 1", errno.ErrOptOnTemporaryTable) - tk.MustGetErrCode("create binding for delete from t where b = 1 and c > 1 using delete /*+ use_index(t, c) */ from t where b = 1 and c > 1", errno.ErrOptOnTemporaryTable) + tk.MustExec("create table t (a int)") + tk.MustExec("create global binding for select * from (select * from t where a = 1) tt using select * from (select * from t where a = 1) tt") + tk.MustExec("create global binding for select * from ((select * from t where a = 1)) tt using select * from (select * from t where a = 1) tt") + rows := tk.MustQuery("show global 
bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from ( select * from `test` . `t` where `a` = ? ) as `tt`", rows[0][0]) + require.Equal(t, "SELECT * FROM (SELECT * FROM `test`.`t` WHERE `a` = 1) AS `tt`", rows[0][1]) +} + +func TestGCBindRecord(t *testing.T) { + // set lease for gc tests + originLease := bindinfo.Lease + bindinfo.Lease = 0 + defer func() { + bindinfo.Lease = originLease + }() + + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, key(a))") + + tk.MustExec("create global binding for select * from t where a = 1 using select * from t use index(a) where a = 1") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` = ?", rows[0][0]) + require.Equal(t, bindinfo.Enabled, rows[0][3]) + tk.MustQuery("select status from mysql.bind_info where original_sql = 'select * from `test` . `t` where `a` = ?'").Check(testkit.Rows( + bindinfo.Enabled, + )) + + h := dom.BindHandle() + // bindinfo.Lease is set to 0 for test env in SetUpSuite. + require.NoError(t, h.GCBindRecord()) + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` = ?", rows[0][0]) + require.Equal(t, bindinfo.Enabled, rows[0][3]) + tk.MustQuery("select status from mysql.bind_info where original_sql = 'select * from `test` . `t` where `a` = ?'").Check(testkit.Rows( + bindinfo.Enabled, + )) + + tk.MustExec("drop global binding for select * from t where a = 1") + tk.MustQuery("show global bindings").Check(testkit.Rows()) + tk.MustQuery("select status from mysql.bind_info where original_sql = 'select * from `test` . 
`t` where `a` = ?'").Check(testkit.Rows( + "deleted", + )) + require.NoError(t, h.GCBindRecord()) + tk.MustQuery("show global bindings").Check(testkit.Rows()) + tk.MustQuery("select status from mysql.bind_info where original_sql = 'select * from `test` . `t` where `a` = ?'").Check(testkit.Rows()) } diff --git a/bindinfo/capture_test.go b/bindinfo/capture_test.go new file mode 100644 index 0000000000000..5afce959a3b00 --- /dev/null +++ b/bindinfo/capture_test.go @@ -0,0 +1,908 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bindinfo_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/pingcap/tidb/bindinfo" + "github.com/pingcap/tidb/config" + "github.com/pingcap/tidb/domain" + "github.com/pingcap/tidb/parser/auth" + "github.com/pingcap/tidb/parser/model" + "github.com/pingcap/tidb/testkit" + "github.com/pingcap/tidb/util/stmtsummary" + "github.com/stretchr/testify/require" +) + +func TestDMLCapturePlanBaseline(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec(" SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, c int, key idx_b(b), key idx_c(c))") + tk.MustExec("create table t1 like t") + dom.BindHandle().CaptureBaselines() + tk.MustQuery("show global bindings").Check(testkit.Rows()) + tk.MustExec("delete from t where b = 1 and c > 1") + tk.MustExec("delete from t where b = 1 and c > 1") + tk.MustExec("update t set a = 1 where b = 1 and c > 1") + tk.MustExec("update t set a = 1 where b = 1 and c > 1") + tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("insert into t1 values(1,1,1)") + tk.MustExec("insert into t1 values(1,1,1)") + tk.MustExec("replace into t1 values(1,1,1)") + tk.MustExec("replace into t1 values(1,1,1)") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("delete from t where b = 1 
and c > 1") + tk.MustExec("delete from t where b = 1 and c > 1") + tk.MustExec("update t set a = 1 where b = 1 and c > 1") + tk.MustExec("update t set a = 1 where b = 1 and c > 1") + tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("insert into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("replace into t1 select * from t where t.b = 1 and t.c > 1") + tk.MustExec("insert into t1 values(1,1,1)") + tk.MustExec("insert into t1 values(1,1,1)") + tk.MustExec("replace into t1 values(1,1,1)") + tk.MustExec("replace into t1 values(1,1,1)") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Sort().Rows() + require.Len(t, rows, 4) + require.Equal(t, "delete from `test` . `t` where `b` = ? and `c` > ?", rows[0][0]) + require.Equal(t, "DELETE /*+ use_index(@`del_1` `test`.`t` `idx_b`)*/ FROM `test`.`t` WHERE `b` = 1 AND `c` > 1", rows[0][1]) + require.Equal(t, "insert into `test` . `t1` select * from `test` . `t` where `t` . `b` = ? and `t` . `c` > ?", rows[1][0]) + require.Equal(t, "INSERT INTO `test`.`t1` SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_b`)*/ * FROM `test`.`t` WHERE `t`.`b` = 1 AND `t`.`c` > 1", rows[1][1]) + require.Equal(t, "replace into `test` . `t1` select * from `test` . `t` where `t` . `b` = ? and `t` . `c` > ?", rows[2][0]) + require.Equal(t, "REPLACE INTO `test`.`t1` SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_b`)*/ * FROM `test`.`t` WHERE `t`.`b` = 1 AND `t`.`c` > 1", rows[2][1]) + require.Equal(t, "update `test` . `t` set `a` = ? where `b` = ? 
and `c` > ?", rows[3][0]) + require.Equal(t, "UPDATE /*+ use_index(@`upd_1` `test`.`t` `idx_b`)*/ `test`.`t` SET `a`=1 WHERE `b` = 1 AND `c` > 1", rows[3][1]) +} + +func TestCapturePlanBaseline(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int)") + dom.BindHandle().CaptureBaselines() + tk.MustQuery("show global bindings").Check(testkit.Rows()) + tk.MustExec("select count(*) from t where a > 10") + tk.MustExec("select count(*) from t where a > 10") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t` WHERE `a` > 10", rows[0][1]) +} + +func TestCaptureDBCaseSensitivity(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("drop database if exists SPM") + tk.MustExec("create database SPM") + tk.MustExec("use SPM") + tk.MustExec("create table t(a int, b int, key(b))") + tk.MustExec("create global binding for select * from t using select /*+ use_index(t) */ * from t") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select /*+ use_index(t,b) */ * from t") + tk.MustExec("select /*+ use_index(t,b) */ * from t") + tk.MustExec("admin capture bindings") + // The capture should ignore the case sensitivity for DB name when checking if any binding exists, + // so there would be no new binding captured. 
+ rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "SELECT /*+ use_index(`t` )*/ * FROM `SPM`.`t`", rows[0][1]) + require.Equal(t, "manual", rows[0][8]) +} + +func TestCaptureBaselinesDefaultDB(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + tk.MustExec("use test") + tk.MustExec("drop database if exists spm") + tk.MustExec("create database spm") + tk.MustExec("create table spm.t(a int, index idx_a(a))") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from spm.t ignore index(idx_a) where a > 10") + tk.MustExec("select * from spm.t ignore index(idx_a) where a > 10") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + // Default DB should be "" when all columns have explicit database name. + require.Equal(t, "", rows[0][2]) + require.Equal(t, bindinfo.Enabled, rows[0][3]) + tk.MustExec("use spm") + tk.MustExec("select * from spm.t where a > 10") + // Should use TableScan because of the "ignore index" binding. 
+ require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 0) +} + +func TestCapturePreparedStmt(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, c int, key idx_b(b), key idx_c(c))") + require.True(t, tk.MustUseIndex("select * from t where b = 1 and c > 1", "idx_b(b)")) + tk.MustExec("prepare stmt from 'select /*+ use_index(t,idx_c) */ * from t where b = ? and c > ?'") + tk.MustExec("set @p = 1") + tk.MustExec("execute stmt using @p, @p") + tk.MustExec("execute stmt using @p, @p") + + tk.MustQuery("show global bindings").Check(testkit.Rows()) + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `b` = ? and `c` > ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`)*/ * FROM `test`.`t` WHERE `b` = ? AND `c` > ?", rows[0][1]) + + require.True(t, tk.MustUseIndex("select /*+ use_index(t,idx_b) */ * from t where b = 1 and c > 1", "idx_c(c)")) + tk.MustExec("admin flush bindings") + tk.MustExec("admin evolve bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `b` = ? and `c` > ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idx_c`)*/ * FROM `test`.`t` WHERE `b` = ? 
AND `c` > ?", rows[0][1]) +} + +func TestCapturePlanBaselineIgnoreTiFlash(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, key(a), key(b))") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from t") + tk.MustExec("select * from t") + // Create virtual tiflash replica info. + domSession := domain.GetDomain(tk.Session()) + is := domSession.InfoSchema() + db, exists := is.SchemaByName(model.NewCIStr("test")) + require.True(t, exists) + for _, tblInfo := range db.Tables { + if tblInfo.Name.L == "t" { + tblInfo.TiFlashReplica = &model.TiFlashReplicaInfo{ + Count: 1, + Available: true, + } + } + } + // Here the plan is the TiFlash plan. + rows := tk.MustQuery("explain select * from t").Rows() + require.Equal(t, "cop[tiflash]", fmt.Sprintf("%v", rows[len(rows)-1][2])) + + tk.MustQuery("show global bindings").Check(testkit.Rows()) + tk.MustExec("admin capture bindings") + // Don't have the TiFlash plan even we have TiFlash replica. + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . 
`t`", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t`", rows[0][1]) +} + +func TestBindingSource(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, index idx_a(a))") + + // Test Source for SQL created sql + tk.MustExec("create global binding for select * from t where a > 10 using select * from t ignore index(idx_a) where a > 10") + bindHandle := dom.BindHandle() + sql, hash := utilNormalizeWithDefaultDB(t, "select * from t where a > ?", "test") + bindData := bindHandle.GetBindRecord(hash, sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) + bind := bindData.Bindings[0] + require.Equal(t, bindinfo.Manual, bind.Source) + + // Test Source for evolved sql + tk.MustExec("set @@tidb_evolve_plan_baselines=1") + tk.MustQuery("select * from t where a > 10") + bindHandle.SaveEvolveTasksToStore() + sql, hash = utilNormalizeWithDefaultDB(t, "select * from t where a > ?", "test") + bindData = bindHandle.GetBindRecord(hash, sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 2) + bind = bindData.Bindings[1] + require.Equal(t, bindinfo.Evolve, bind.Source) + tk.MustExec("set @@tidb_evolve_plan_baselines=0") + + // Test Source for captured sqls + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + tk.MustExec("use test") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from t ignore index(idx_a) where a < 10") + tk.MustExec("select * from t ignore index(idx_a) where a < 10") + tk.MustExec("admin capture bindings") + bindHandle.CaptureBaselines() + sql, hash = utilNormalizeWithDefaultDB(t, "select * from t where a < ?", "test") + bindData = bindHandle.GetBindRecord(hash, sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t` where `a` < ?", bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 1) + bind = bindData.Bindings[0] + require.Equal(t, bindinfo.Capture, bind.Source) +} + +func TestCapturedBindingCharset(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("use test") + tk.MustExec("create table t(name varchar(25), index idx(name))") + + tk.MustExec("set character_set_connection = 'ascii'") + tk.MustExec("update t set name = 'hello' where name <= 'abc'") + tk.MustExec("update t set name = 'hello' where name <= 'abc'") + tk.MustExec("set character_set_connection = 'utf8mb4'") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "update `test` . `t` set `name` = ? 
where `name` <= ?", rows[0][0]) + require.Equal(t, "UPDATE /*+ use_index(@`upd_1` `test`.`t` `idx`)*/ `test`.`t` SET `name`='hello' WHERE `name` <= 'abc'", rows[0][1]) + require.Equal(t, "utf8mb4", rows[0][6]) + require.Equal(t, "utf8mb4_bin", rows[0][7]) +} + +func TestConcurrentCapture(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + // Simulate an existing binding generated by concurrent CREATE BINDING, which has not been synchronized to current tidb-server yet. + // Actually, it is more common to be generated by concurrent baseline capture, I use Manual just for simpler test verification. + tk.MustExec("insert into mysql.bind_info values('select * from `test` . `t`', 'select * from `test` . `t`', '', 'enabled', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustQuery("select original_sql, source from mysql.bind_info where source != 'builtin'").Check(testkit.Rows( + "select * from `test` . `t` manual", + )) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int)") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from t") + tk.MustExec("select * from t") + tk.MustExec("admin capture bindings") + tk.MustQuery("select original_sql, source, status from mysql.bind_info where source != 'builtin'").Check(testkit.Rows( + "select * from `test` . `t` manual deleted", + "select * from `test` . 
`t` capture enabled", + )) +} + +func TestUpdateSubqueryCapture(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t1, t2") + tk.MustExec("create table t1(a int, b int, c int, key idx_b(b))") + tk.MustExec("create table t2(a int, b int)") + stmtsummary.StmtSummaryByDigestMap.Clear() + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("update t1 set b = 1 where b = 2 and (a in (select a from t2 where b = 1) or c in (select a from t2 where b = 1))") + tk.MustExec("update t1 set b = 1 where b = 2 and (a in (select a from t2 where b = 1) or c in (select a from t2 where b = 1))") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + bindSQL := "UPDATE /*+ use_index(@`upd_1` `test`.`t1` `idx_b`), use_index(@`sel_1` `test`.`t2` ), hash_join(@`upd_1` `test`.`t1`), use_index(@`sel_2` `test`.`t2` )*/ `test`.`t1` SET `b`=1 WHERE `b` = 2 AND (`a` IN (SELECT `a` FROM `test`.`t2` WHERE `b` = 1) OR `c` IN (SELECT `a` FROM `test`.`t2` WHERE `b` = 1))" + require.Equal(t, bindSQL, rows[0][1]) + tk.MustExec(bindSQL) + require.Len(t, tk.Session().GetSessionVars().StmtCtx.GetWarnings(), 0) +} + +func TestIssue20417(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec(`CREATE TABLE t ( + pk VARBINARY(36) NOT NULL PRIMARY KEY, + b BIGINT NOT NULL, + c BIGINT NOT NULL, + pad VARBINARY(2048), + INDEX idxb(b), + INDEX idxc(c) + )`) + + // Test for create binding + utilCleanBindingEnv(tk, dom) + tk.MustExec("create global binding for 
select * from t using select /*+ use_index(t, idxb) */ * from t") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t`", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(`t` `idxb`)*/ * FROM `test`.`t`", rows[0][1]) + require.True(t, tk.MustUseIndex("select * from t", "idxb(b)")) + require.True(t, tk.MustUseIndex("select * from test.t", "idxb(b)")) + + tk.MustExec("create global binding for select * from t WHERE b=2 AND c=3924541 using select /*+ use_index(@sel_1 test.t idxb) */ * from t WHERE b=2 AND c=3924541") + require.True(t, tk.MustUseIndex("SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`)*/ * FROM `test`.`t` WHERE `b`=2 AND `c`=3924541", "idxb(b)")) + require.True(t, tk.MustUseIndex("SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`)*/ * FROM `t` WHERE `b`=2 AND `c`=3924541", "idxb(b)")) + + // Test for capture baseline + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + dom.BindHandle().CaptureBaselines() + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from t where b=2 and c=213124") + tk.MustExec("select * from t where b=2 and c=213124") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `b` = ? 
and `c` = ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxb`)*/ * FROM `test`.`t` WHERE `b` = 2 AND `c` = 213124", rows[0][1]) + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + + // Test for evolve baseline + utilCleanBindingEnv(tk, dom) + tk.MustExec("set @@tidb_evolve_plan_baselines=1") + tk.MustExec("create global binding for select * from t WHERE c=3924541 using select /*+ use_index(@sel_1 test.t idxb) */ * from t WHERE c=3924541") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `c` = ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxb`)*/ * FROM `test`.`t` WHERE `c` = 3924541", rows[0][1]) + tk.MustExec("select /*+ use_index(t idxc)*/ * from t where c=3924541") + require.Equal(t, "t:idxb", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("admin flush bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 2) + require.Equal(t, "select * from `test` . `t` where `c` = ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`)*/ * FROM `test`.`t` WHERE `c` = 3924541", rows[0][1]) + require.Equal(t, "pending verify", rows[0][3]) + tk.MustExec("admin evolve bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 2) + require.Equal(t, "select * from `test` . 
`t` where `c` = ?", rows[0][0]) + require.Equal(t, "SELECT /*+ use_index(@`sel_1` `test`.`t` `idxc`)*/ * FROM `test`.`t` WHERE `c` = 3924541", rows[0][1]) + status := rows[0][3].(string) + require.True(t, status == bindinfo.Enabled || status == bindinfo.Rejected) + tk.MustExec("set @@tidb_evolve_plan_baselines=0") +} + +func TestCaptureWithZeroSlowLogThreshold(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int)") + stmtsummary.StmtSummaryByDigestMap.Clear() + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("set tidb_slow_log_threshold = 0") + tk.MustExec("select * from t") + tk.MustExec("select * from t") + tk.MustExec("set tidb_slow_log_threshold = 300") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t`", rows[0][0]) +} + +func TestIssue25505(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + stmtsummary.StmtSummaryByDigestMap.Clear() + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + defer func() { + tk.MustExec("set tidb_slow_log_threshold = 300") + }() + tk.MustExec("set tidb_slow_log_threshold = 0") + tk.MustExec("create table t (a int(11) default null,b int(11) default null,key b (b),key ba (b))") + tk.MustExec("create table t1 (a int(11) default null,b int(11) default null,key idx_ab (a,b),key idx_a (a),key idx_b (b))") + tk.MustExec("create table t2 (a int(11) default null,b int(11) default null,key idx_ab (a,b),key idx_a (a),key idx_b (b))") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + + spmMap := map[string]string{} + spmMap["with recursive `cte` ( `a` ) as ( select ? 
union select `a` + ? from `test` . `t1` where `a` < ? ) select * from `cte`"] = + "WITH RECURSIVE `cte` (`a`) AS (SELECT 2 UNION SELECT `a` + 1 FROM `test`.`t1` WHERE `a` < 5) SELECT /*+ use_index(@`sel_3` `test`.`t1` `idx_ab`), hash_agg(@`sel_1`)*/ * FROM `cte`" + spmMap["with recursive `cte1` ( `a` , `b` ) as ( select * from `test` . `t` where `b` = ? union select `a` + ? , `b` + ? from `cte1` where `a` < ? ) select * from `test` . `t`"] = + "WITH RECURSIVE `cte1` (`a`, `b`) AS (SELECT * FROM `test`.`t` WHERE `b` = 1 UNION SELECT `a` + 1,`b` + 1 FROM `cte1` WHERE `a` < 2) SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t`" + spmMap["with `cte1` as ( select * from `test` . `t` ) , `cte2` as ( select ? ) select * from `test` . `t`"] = + "WITH `cte1` AS (SELECT * FROM `test`.`t`), `cte2` AS (SELECT 4) SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t`" + spmMap["with `cte` as ( select * from `test` . `t` where `b` = ? ) select * from `test` . `t`"] = + "WITH `cte` AS (SELECT * FROM `test`.`t` WHERE `b` = 6) SELECT /*+ use_index(@`sel_1` `test`.`t` )*/ * FROM `test`.`t`" + spmMap["with recursive `cte` ( `a` ) as ( select ? union select `a` + ? from `test` . `t1` where `a` > ? ) select * from `cte`"] = + "WITH RECURSIVE `cte` (`a`) AS (SELECT 2 UNION SELECT `a` + 1 FROM `test`.`t1` WHERE `a` > 5) SELECT /*+ use_index(@`sel_3` `test`.`t1` `idx_b`), hash_agg(@`sel_1`)*/ * FROM `cte`" + spmMap["with `cte` as ( with `cte1` as ( select * from `test` . `t2` where `a` > ? and `b` > ? ) select * from `cte1` ) select * from `cte` join `test` . `t1` on `t1` . `a` = `cte` . `a`"] = + "WITH `cte` AS (WITH `cte1` AS (SELECT * FROM `test`.`t2` WHERE `a` > 1 AND `b` > 1) SELECT * FROM `cte1`) SELECT /*+ use_index(@`sel_3` `test`.`t2` `idx_ab`), use_index(@`sel_1` `test`.`t1` `idx_ab`), inl_join(@`sel_1` `test`.`t1`)*/ * FROM `cte` JOIN `test`.`t1` ON `t1`.`a` = `cte`.`a`" + spmMap["with `cte` as ( with `cte1` as ( select * from `test` . `t2` where `a` = ? 
and `b` = ? ) select * from `cte1` ) select * from `cte` join `test` . `t1` on `t1` . `a` = `cte` . `a`"] = + "WITH `cte` AS (WITH `cte1` AS (SELECT * FROM `test`.`t2` WHERE `a` = 1 AND `b` = 1) SELECT * FROM `cte1`) SELECT /*+ use_index(@`sel_3` `test`.`t2` `idx_a`), use_index(@`sel_1` `test`.`t1` `idx_a`), inl_join(@`sel_1` `test`.`t1`)*/ * FROM `cte` JOIN `test`.`t1` ON `t1`.`a` = `cte`.`a`" + + tk.MustExec("with cte as (with cte1 as (select /*+use_index(t2 idx_a)*/ * from t2 where a = 1 and b = 1) select * from cte1) select /*+use_index(t1 idx_a)*/ * from cte join t1 on t1.a=cte.a;") + tk.MustExec("with cte as (with cte1 as (select /*+use_index(t2 idx_a)*/ * from t2 where a = 1 and b = 1) select * from cte1) select /*+use_index(t1 idx_a)*/ * from cte join t1 on t1.a=cte.a;") + tk.MustExec("with cte as (with cte1 as (select /*+use_index(t2 idx_a)*/ * from t2 where a = 1 and b = 1) select * from cte1) select /*+use_index(t1 idx_a)*/ * from cte join t1 on t1.a=cte.a;") + + tk.MustExec("with cte as (with cte1 as (select * from t2 use index(idx_ab) where a > 1 and b > 1) select * from cte1) select /*+use_index(t1 idx_ab)*/ * from cte join t1 on t1.a=cte.a;") + tk.MustExec("with cte as (with cte1 as (select * from t2 use index(idx_ab) where a > 1 and b > 1) select * from cte1) select /*+use_index(t1 idx_ab)*/ * from cte join t1 on t1.a=cte.a;") + tk.MustExec("with cte as (with cte1 as (select * from t2 use index(idx_ab) where a > 1 and b > 1) select * from cte1) select /*+use_index(t1 idx_ab)*/ * from cte join t1 on t1.a=cte.a;") + + tk.MustExec("WITH RECURSIVE cte(a) AS (SELECT 2 UNION SELECT a+1 FROM t1 use index(idx_ab) WHERE a < 5) SELECT * FROM cte;") + tk.MustExec("WITH RECURSIVE cte(a) AS (SELECT 2 UNION SELECT a+1 FROM t1 use index(idx_ab) WHERE a < 5) SELECT * FROM cte;") + tk.MustExec("WITH RECURSIVE cte(a) AS (SELECT 2 UNION SELECT a+1 FROM t1 use index(idx_ab) WHERE a < 5) SELECT * FROM cte;") + + tk.MustExec("WITH RECURSIVE cte(a) AS (SELECT 2 UNION 
SELECT /*+use_index(t1 idx_b)*/ a+1 FROM t1 WHERE a > 5) SELECT * FROM cte;") + tk.MustExec("WITH RECURSIVE cte(a) AS (SELECT 2 UNION SELECT /*+use_index(t1 idx_b)*/ a+1 FROM t1 WHERE a > 5) SELECT * FROM cte;") + tk.MustExec("WITH RECURSIVE cte(a) AS (SELECT 2 UNION SELECT /*+use_index(t1 idx_b)*/ a+1 FROM t1 WHERE a > 5) SELECT * FROM cte;") + + tk.MustExec("with cte as (select * from t where b=6) select * from t") + tk.MustExec("with cte as (select * from t where b=6) select * from t") + tk.MustExec("with cte as (select * from t where b=6) select * from t") + + tk.MustExec("with cte1 as (select * from t), cte2 as (select 4) select * from t") + tk.MustExec("with cte1 as (select * from t), cte2 as (select 5) select * from t") + tk.MustExec("with cte1 as (select * from t), cte2 as (select 6) select * from t") + + tk.MustExec("with recursive cte1(a,b) as (select * from t where b = 1 union select a+1,b+1 from cte1 where a < 2) select * from t") + tk.MustExec("with recursive cte1(a,b) as (select * from t where b = 1 union select a+1,b+1 from cte1 where a < 2) select * from t") + tk.MustExec("with recursive cte1(a,b) as (select * from t where b = 1 union select a+1,b+1 from cte1 where a < 2) select * from t") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 7) + for _, row := range rows { + str := fmt.Sprintf("%s", row[0]) + require.Equal(t, spmMap[str], row[1]) + } +} + +func TestCaptureUserFilter(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int)") + + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: 
"%"}, nil, nil)) + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + + // test user filter + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('user', 'root')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) // cannot capture the stmt + + // change another user + tk.MustExec(`create user usr1`) + tk.MustExec(`grant all on *.* to usr1 with grant option`) + tk2 := testkit.NewTestKit(t, store) + tk2.MustExec("use test") + require.True(t, tk2.Session().Auth(&auth.UserIdentity{Username: "usr1", Hostname: "%"}, nil, nil)) + tk2.MustExec("select * from t where a > 10") + tk2.MustExec("select * from t where a > 10") + tk2.MustExec("admin capture bindings") + rows = tk2.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) // can capture the stmt + + // use user-filter with other types of filter together + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('user', 'root')") + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'test.t')") + tk2.MustExec("select * from t where a > 10") + tk2.MustExec("select * from t where a > 10") + tk2.MustExec("admin capture bindings") + rows = tk2.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) // filtered by the table filter +} + +func TestCaptureTableFilterValid(t *testing.T) { + type matchCase struct { + table 
string + matched bool + } + type filterCase struct { + filter string + valid bool + mcases []matchCase + } + filterCases := []filterCase{ + {"*.*", true, []matchCase{{"db.t", true}}}, + {"***.***", true, []matchCase{{"db.t", true}}}, + {"d*.*", true, []matchCase{{"db.t", true}}}, + {"*.t", true, []matchCase{{"db.t", true}}}, + {"?.t*", true, []matchCase{{"d.t", true}, {"d.tb", true}, {"db.t", false}}}, + {"db.t[1-3]", true, []matchCase{{"db.t1", true}, {"db.t2", true}, {"db.t4", false}}}, + {"!db.table", false, nil}, + {"@db.table", false, nil}, + {"table", false, nil}, + {"", false, nil}, + {"\t ", false, nil}, + } + for _, fc := range filterCases { + f, valid := bindinfo.ParseCaptureTableFilter(fc.filter) + require.Equal(t, fc.valid, valid) + if valid { + for _, mc := range fc.mcases { + tmp := strings.Split(mc.table, ".") + require.Equal(t, mc.matched, f.MatchTable(tmp[0], tmp[1])) + } + } + } +} + +func TestCaptureWildcardFilter(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + dbs := []string{"db11", "db12", "db2"} + tbls := []string{"t11", "t12", "t2"} + for _, db := range dbs { + tk.MustExec(fmt.Sprintf(`drop database if exists %v`, db)) + tk.MustExec(fmt.Sprintf(`create database %v`, db)) + tk.MustExec(fmt.Sprintf(`use %v`, db)) + for _, tbl := range tbls { + tk.MustExec(fmt.Sprintf(`create table %v(a int)`, tbl)) + } + } + mustExecTwice := func() { + for _, db := range dbs { + for _, tbl := range tbls { + tk.MustExec(fmt.Sprintf(`select * from %v.%v where a>10`, db, tbl)) + tk.MustExec(fmt.Sprintf(`select * from %v.%v where a>10`, db, tbl)) + } + } + } + checkBindings := func(dbTbls 
...string) { + m := make(map[string]bool) // map[query]existed + for _, dbTbl := range dbTbls { + tmp := strings.Split(dbTbl, ".") + q := fmt.Sprintf("select * from `%v` . `%v` where `a` > ?", tmp[0], tmp[1]) + m[q] = false + } + + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Sort().Rows() + require.Len(t, rows, len(dbTbls)) + for _, r := range rows { + q := r[0].(string) + if _, exist := m[q]; !exist { // encounter an unexpected binding + t.Fatalf("unexpected binding %v", q) + } + m[q] = true + } + for q, exist := range m { + if !exist { // a expected binding is not existed + t.Fatalf("missed binding %v", q) + } + } + } + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec(`insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'db11.t1*')`) + mustExecTwice() + checkBindings("db11.t2", "db12.t11", "db12.t12", "db12.t2", "db2.t11", "db2.t12", "db2.t2") // db11.t11 and db11.t12 are filtered + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec(`insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'db1*.t11')`) + mustExecTwice() + checkBindings("db11.t12", "db11.t2", "db12.t12", "db12.t2", "db2.t11", "db2.t12", "db2.t2") // db11.t11 and db12.t11 are filtered + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec(`insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'db1*.t1*')`) + mustExecTwice() + checkBindings("db11.t2", "db12.t2", "db2.t11", "db2.t12", "db2.t2") // db11.t11 / db12.t11 / db11.t12 / db12.t12 are filtered + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") 
+ tk.MustExec(`insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'db1*.*')`) + mustExecTwice() + checkBindings("db2.t11", "db2.t12", "db2.t2") // db11.* / db12.* are filtered + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec(`insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', '*.t1*')`) + mustExecTwice() + checkBindings("db11.t2", "db12.t2", "db2.t2") // *.t11 and *.t12 are filtered + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec(`insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'db*.t*')`) + mustExecTwice() + checkBindings() // all are filtered + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + mustExecTwice() + checkBindings("db11.t11", "db11.t12", "db11.t2", "db12.t11", "db12.t12", "db12.t2", "db2.t11", "db2.t12", "db2.t2") // no filter, all can be captured +} + +func TestCaptureFilter(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = on") + defer func() { + tk.MustExec("SET GLOBAL tidb_capture_plan_baselines = off") + }() + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int)") + + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + 
require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + + // Valid table filter. + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'test.t')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + tk.MustExec("select * from mysql.capture_plan_baselines_blacklist") + tk.MustExec("select * from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `mysql` . `capture_plan_baselines_blacklist`", rows[0][0]) + + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Sort().Rows() + require.Len(t, rows, 2) + require.Equal(t, "select * from `mysql` . `capture_plan_baselines_blacklist`", rows[0][0]) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[1][0]) + + // Invalid table filter. + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 't')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + + // Valid database filter. 
+ utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'mysql.*')") + tk.MustExec("select * from mysql.capture_plan_baselines_blacklist") + tk.MustExec("select * from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Sort().Rows() + require.Len(t, rows, 2) + require.Equal(t, "select * from `mysql` . `capture_plan_baselines_blacklist`", rows[0][0]) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[1][0]) + + // Valid frequency filter. + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('frequency', '2')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + + // Invalid frequency filter. 
+ utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('frequency', '0')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + + // Invalid filter type. + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('unknown', 'xx')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . `t` where `a` > ?", rows[0][0]) + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + + // Case sensitivity. + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('tABle', 'tESt.T')") + tk.MustExec("select * from t where a > 10") + tk.MustExec("select * from t where a > 10") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Sort().Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `test` . 
`t` where `a` > ?", rows[0][0]) + + utilCleanBindingEnv(tk, dom) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("insert into mysql.capture_plan_baselines_blacklist(filter_type, filter_value) values('table', 'mySQl.*')") + tk.MustExec("select * from mysql.capture_plan_baselines_blacklist") + tk.MustExec("select * from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + + tk.MustExec("delete from mysql.capture_plan_baselines_blacklist") + tk.MustExec("admin capture bindings") + rows = tk.MustQuery("show global bindings").Sort().Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `mysql` . `capture_plan_baselines_blacklist`", rows[0][0]) +} diff --git a/bindinfo/handle.go b/bindinfo/handle.go index 2281af3c88bd3..5a90b16efd094 100644 --- a/bindinfo/handle.go +++ b/bindinfo/handle.go @@ -8,6 +8,7 @@ // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
@@ -23,12 +24,15 @@ import ( "sync/atomic" "time" - "github.com/pingcap/parser" - "github.com/pingcap/parser/ast" - "github.com/pingcap/parser/mysql" - "github.com/pingcap/parser/terror" + tablefilter "github.com/pingcap/tidb-tools/pkg/table-filter" "github.com/pingcap/tidb/metrics" + "github.com/pingcap/tidb/parser" + "github.com/pingcap/tidb/parser/ast" + "github.com/pingcap/tidb/parser/format" + "github.com/pingcap/tidb/parser/mysql" + "github.com/pingcap/tidb/parser/terror" "github.com/pingcap/tidb/sessionctx" + "github.com/pingcap/tidb/sessionctx/stmtctx" "github.com/pingcap/tidb/sessionctx/variable" "github.com/pingcap/tidb/types" driver "github.com/pingcap/tidb/types/parser_driver" @@ -103,7 +107,7 @@ type bindRecordUpdate struct { func NewBindHandle(ctx sessionctx.Context) *BindHandle { handle := &BindHandle{} handle.sctx.Context = ctx - handle.bindInfo.Value.Store(make(cache, 32)) + handle.bindInfo.Value.Store(newBindCache()) handle.bindInfo.parser = parser.New() handle.invalidBindRecordMap.Value.Store(make(map[string]*bindRecordUpdate)) handle.invalidBindRecordMap.flushFunc = func(record *BindRecord) error { @@ -114,6 +118,7 @@ func NewBindHandle(ctx sessionctx.Context) *BindHandle { // BindSQL has already been validated when coming here, so we use nil sctx parameter. return handle.AddBindRecord(nil, record) } + variable.RegisterStatistics(handle) return handle } @@ -127,22 +132,18 @@ func (h *BindHandle) Update(fullLoad bool) (err error) { } exec := h.sctx.Context.(sqlexec.RestrictedSQLExecutor) - stmt, err := exec.ParseWithParams(context.TODO(), `SELECT original_sql, bind_sql, default_db, status, create_time, update_time, charset, collation, source - FROM mysql.bind_info WHERE update_time > %? ORDER BY update_time`, updateTime) - if err != nil { - return err - } - // No need to acquire the session context lock for ExecRestrictedStmt, it + // No need to acquire the session context lock for ExecRestrictedSQL, it // uses another background session. 
- rows, _, err := exec.ExecRestrictedStmt(context.Background(), stmt) + rows, _, err := exec.ExecRestrictedSQL(context.TODO(), nil, `SELECT original_sql, bind_sql, default_db, status, create_time, update_time, charset, collation, source + FROM mysql.bind_info WHERE update_time > %? ORDER BY update_time, create_time`, updateTime) if err != nil { h.bindInfo.Unlock() return err } - newCache := h.bindInfo.Value.Load().(cache).copy() + newCache := h.bindInfo.Value.Load().(*bindCache).Copy() defer func() { h.bindInfo.lastUpdateTime = lastUpdateTime h.bindInfo.Value.Store(newCache) @@ -155,23 +156,26 @@ func (h *BindHandle) Update(fullLoad bool) (err error) { continue } hash, meta, err := h.newBindRecord(row) - if err != nil { - logutil.BgLogger().Debug("[sql-bind] failed to generate bind record from data row", zap.Error(err)) - continue - } + // Update lastUpdateTime to the newest one. + // Even if this one is an invalid bind. if meta.Bindings[0].UpdateTime.Compare(lastUpdateTime) > 0 { lastUpdateTime = meta.Bindings[0].UpdateTime } - oldRecord := newCache.getBindRecord(hash, meta.OriginalSQL, meta.Db) + if err != nil { + logutil.BgLogger().Debug("[sql-bind] failed to generate bind record from data row", zap.Error(err)) + continue + } + + oldRecord := newCache.GetBindRecord(hash, meta.OriginalSQL, meta.Db) newRecord := merge(oldRecord, meta).removeDeletedBindings() if len(newRecord.Bindings) > 0 { - newCache.setBindRecord(hash, newRecord) + newCache.SetBindRecord(hash, newRecord) } else { - newCache.removeDeletedBindRecord(hash, newRecord) + newCache.RemoveBindRecord(hash, newRecord) } - updateMetrics(metrics.ScopeGlobal, oldRecord, newCache.getBindRecord(hash, meta.OriginalSQL, meta.Db), true) + updateMetrics(metrics.ScopeGlobal, oldRecord, newCache.GetBindRecord(hash, meta.OriginalSQL, meta.Db), true) } return nil } @@ -217,14 +221,16 @@ func (h *BindHandle) CreateBindRecord(sctx sessionctx.Context, record *BindRecor if err = h.lockBindInfoTable(); err != nil { return 
err } - // Binding recreation should physically delete previous bindings. - _, err = exec.ExecuteInternal(context.TODO(), `DELETE FROM mysql.bind_info WHERE original_sql = %?`, record.OriginalSQL) + + now := types.NewTime(types.FromGoTime(time.Now()), mysql.TypeTimestamp, 3) + + updateTs := now.String() + _, err = exec.ExecuteInternal(context.TODO(), `UPDATE mysql.bind_info SET status = %?, update_time = %? WHERE original_sql = %? AND update_time < %?`, + deleted, updateTs, record.OriginalSQL, updateTs) if err != nil { return err } - now := types.NewTime(types.FromGoTime(time.Now()), mysql.TypeTimestamp, 3) - for i := range record.Bindings { record.Bindings[i].CreateTime = now record.Bindings[i].UpdateTime = now @@ -378,17 +384,56 @@ func (h *BindHandle) DropBindRecord(originalSQL, db string, binding *Binding) (e updateTs := types.NewTime(types.FromGoTime(time.Now()), mysql.TypeTimestamp, 3).String() if binding == nil { - _, err = exec.ExecuteInternal(context.TODO(), `UPDATE mysql.bind_info SET status = %?, update_time = %? WHERE original_sql = %? AND update_time < %?`, - deleted, updateTs, originalSQL, updateTs) + _, err = exec.ExecuteInternal(context.TODO(), `UPDATE mysql.bind_info SET status = %?, update_time = %? WHERE original_sql = %? AND update_time < %? AND status != %?`, + deleted, updateTs, originalSQL, updateTs, deleted) } else { - _, err = exec.ExecuteInternal(context.TODO(), `UPDATE mysql.bind_info SET status = %?, update_time = %? WHERE original_sql = %? AND update_time < %? AND bind_sql = %?`, - deleted, updateTs, originalSQL, updateTs, binding.BindSQL) + _, err = exec.ExecuteInternal(context.TODO(), `UPDATE mysql.bind_info SET status = %?, update_time = %? WHERE original_sql = %? AND update_time < %? AND bind_sql = %? 
and status != %?`, + deleted, updateTs, originalSQL, updateTs, binding.BindSQL, deleted) } deleteRows = int(h.sctx.Context.GetSessionVars().StmtCtx.AffectedRows()) return err } +// GCBindRecord physically removes the deleted bind records in mysql.bind_info. +func (h *BindHandle) GCBindRecord() (err error) { + h.bindInfo.Lock() + h.sctx.Lock() + defer func() { + h.sctx.Unlock() + h.bindInfo.Unlock() + }() + exec, _ := h.sctx.Context.(sqlexec.SQLExecutor) + _, err = exec.ExecuteInternal(context.TODO(), "BEGIN PESSIMISTIC") + if err != nil { + return err + } + defer func() { + if err != nil { + _, err1 := exec.ExecuteInternal(context.TODO(), "ROLLBACK") + terror.Log(err1) + return + } + + _, err = exec.ExecuteInternal(context.TODO(), "COMMIT") + if err != nil { + return + } + }() + + // Lock mysql.bind_info to synchronize with CreateBindRecord / AddBindRecord / DropBindRecord on other tidb instances. + if err = h.lockBindInfoTable(); err != nil { + return err + } + + // To make sure that all the deleted bind records have been acknowledged to all tidb, + // we only garbage collect those records with update_time before 10 leases. + updateTime := time.Now().Add(-(10 * Lease)) + updateTimeStr := types.NewTime(types.FromGoTime(updateTime), mysql.TypeTimestamp, 3).String() + _, err = exec.ExecuteInternal(context.TODO(), `DELETE FROM mysql.bind_info WHERE status = 'deleted' and update_time < %?`, updateTimeStr) + return err +} + // lockBindInfoTable simulates `LOCK TABLE mysql.bind_info WRITE` by acquiring a pessimistic lock on a // special builtin row of mysql.bind_info. Note that this function must be called with h.sctx.Lock() held. // We can replace this implementation to normal `LOCK TABLE mysql.bind_info WRITE` if that feature is @@ -473,32 +518,36 @@ func (h *BindHandle) AddDropInvalidBindTask(invalidBindRecord *BindRecord) { // Size returns the size of bind info cache. 
func (h *BindHandle) Size() int { - size := 0 - for _, bindRecords := range h.bindInfo.Load().(cache) { - size += len(bindRecords) - } + size := len(h.bindInfo.Load().(*bindCache).GetAllBindRecords()) return size } // GetBindRecord returns the BindRecord of the (normdOrigSQL,db) if BindRecord exist. func (h *BindHandle) GetBindRecord(hash, normdOrigSQL, db string) *BindRecord { - return h.bindInfo.Load().(cache).getBindRecord(hash, normdOrigSQL, db) + return h.bindInfo.Load().(*bindCache).GetBindRecord(hash, normdOrigSQL, db) } // GetAllBindRecord returns all bind records in cache. func (h *BindHandle) GetAllBindRecord() (bindRecords []*BindRecord) { - bindRecordMap := h.bindInfo.Load().(cache) - for _, bindRecord := range bindRecordMap { - bindRecords = append(bindRecords, bindRecord...) - } - return bindRecords + return h.bindInfo.Load().(*bindCache).GetAllBindRecords() +} + +// SetBindCacheCapacity reset the capacity for the bindCache. +// It will not affect already cached BindRecords. +func (h *BindHandle) SetBindCacheCapacity(capacity int64) { + h.bindInfo.Load().(*bindCache).SetMemCapacity(capacity) } // newBindRecord builds BindRecord from a tuple in storage. func (h *BindHandle) newBindRecord(row chunk.Row) (string, *BindRecord, error) { + status := row.GetString(3) + // For compatibility, the 'Using' status binding will be converted to the 'Enabled' status binding. + if status == Using { + status = Enabled + } hint := Binding{ BindSQL: row.GetString(1), - Status: row.GetString(3), + Status: status, CreateTime: row.GetTime(4), UpdateTime: row.GetTime(5), Charset: row.GetString(6), @@ -521,9 +570,9 @@ func (h *BindHandle) newBindRecord(row chunk.Row) (string, *BindRecord, error) { // setBindRecord sets the BindRecord to the cache, if there already exists a BindRecord, // it will be overridden. 
func (h *BindHandle) setBindRecord(hash string, meta *BindRecord) { - newCache := h.bindInfo.Value.Load().(cache).copy() - oldRecord := newCache.getBindRecord(hash, meta.OriginalSQL, meta.Db) - newCache.setBindRecord(hash, meta) + newCache := h.bindInfo.Value.Load().(*bindCache).Copy() + oldRecord := newCache.GetBindRecord(hash, meta.OriginalSQL, meta.Db) + newCache.SetBindRecord(hash, meta) h.bindInfo.Value.Store(newCache) updateMetrics(metrics.ScopeGlobal, oldRecord, meta, false) } @@ -531,88 +580,138 @@ func (h *BindHandle) setBindRecord(hash string, meta *BindRecord) { // appendBindRecord addes the BindRecord to the cache, all the stale BindRecords are // removed from the cache after this operation. func (h *BindHandle) appendBindRecord(hash string, meta *BindRecord) { - newCache := h.bindInfo.Value.Load().(cache).copy() - oldRecord := newCache.getBindRecord(hash, meta.OriginalSQL, meta.Db) + newCache := h.bindInfo.Value.Load().(*bindCache).Copy() + oldRecord := newCache.GetBindRecord(hash, meta.OriginalSQL, meta.Db) newRecord := merge(oldRecord, meta) - newCache.setBindRecord(hash, newRecord) + newCache.SetBindRecord(hash, newRecord) h.bindInfo.Value.Store(newCache) updateMetrics(metrics.ScopeGlobal, oldRecord, newRecord, false) } // removeBindRecord removes the BindRecord from the cache. 
func (h *BindHandle) removeBindRecord(hash string, meta *BindRecord) { - newCache := h.bindInfo.Value.Load().(cache).copy() - oldRecord := newCache.getBindRecord(hash, meta.OriginalSQL, meta.Db) - newCache.removeDeletedBindRecord(hash, meta) + newCache := h.bindInfo.Value.Load().(*bindCache).Copy() + oldRecord := newCache.GetBindRecord(hash, meta.OriginalSQL, meta.Db) + newCache.RemoveBindRecord(hash, meta) h.bindInfo.Value.Store(newCache) - updateMetrics(metrics.ScopeGlobal, oldRecord, newCache.getBindRecord(hash, meta.OriginalSQL, meta.Db), false) + updateMetrics(metrics.ScopeGlobal, oldRecord, newCache.GetBindRecord(hash, meta.OriginalSQL, meta.Db), false) } -// removeDeletedBindRecord removes the BindRecord which has same originSQL and db with specified BindRecord. -func (c cache) removeDeletedBindRecord(hash string, meta *BindRecord) { - metas, ok := c[hash] - if !ok { - return +func copyBindRecordUpdateMap(oldMap map[string]*bindRecordUpdate) map[string]*bindRecordUpdate { + newMap := make(map[string]*bindRecordUpdate, len(oldMap)) + for k, v := range oldMap { + newMap[k] = v } + return newMap +} - for i := len(metas) - 1; i >= 0; i-- { - if metas[i].isSame(meta) { - metas[i] = metas[i].remove(meta) - if len(metas[i].Bindings) == 0 { - metas = append(metas[:i], metas[i+1:]...) 
- } - if len(metas) == 0 { - delete(c, hash) - return +type captureFilter struct { + frequency int64 + tables []tablefilter.Filter // `schema.table` + users map[string]struct{} + + fail bool + currentDB string +} + +func (cf *captureFilter) Enter(in ast.Node) (out ast.Node, skipChildren bool) { + switch x := in.(type) { + case *ast.TableName: + tblEntry := stmtctx.TableEntry{ + DB: x.Schema.L, + Table: x.Name.L, + } + if x.Schema.L == "" { + tblEntry.DB = cf.currentDB + } + for _, tableFilter := range cf.tables { + if tableFilter.MatchTable(tblEntry.DB, tblEntry.Table) { + cf.fail = true // some filter is matched } } } - c[hash] = metas + return in, cf.fail } -func (c cache) setBindRecord(hash string, meta *BindRecord) { - metas := c[hash] - for i := range metas { - if metas[i].OriginalSQL == meta.OriginalSQL { - metas[i] = meta - return - } - } - c[hash] = append(c[hash], meta) +func (cf *captureFilter) Leave(in ast.Node) (out ast.Node, ok bool) { + return in, true } -func (c cache) copy() cache { - newCache := make(cache, len(c)) - for k, v := range c { - bindRecords := make([]*BindRecord, len(v)) - copy(bindRecords, v) - newCache[k] = bindRecords - } - return newCache +func (cf *captureFilter) isEmpty() bool { + return len(cf.tables) == 0 && len(cf.users) == 0 } -func copyBindRecordUpdateMap(oldMap map[string]*bindRecordUpdate) map[string]*bindRecordUpdate { - newMap := make(map[string]*bindRecordUpdate, len(oldMap)) - for k, v := range oldMap { - newMap[k] = v +// ParseCaptureTableFilter checks whether this filter is valid and parses it. +func ParseCaptureTableFilter(tableFilter string) (f tablefilter.Filter, valid bool) { + // forbid wildcards '!' and '@' for safety, + // please see https://github.com/pingcap/tidb-tools/tree/master/pkg/table-filter for more details. + tableFilter = strings.TrimLeft(tableFilter, " \t") + if tableFilter == "" { + return nil, false } - return newMap + if tableFilter[0] == '!' 
|| tableFilter[0] == '@' { + return nil, false + } + var err error + f, err = tablefilter.Parse([]string{tableFilter}) + if err != nil { + return nil, false + } + return f, true } -func (c cache) getBindRecord(hash, normdOrigSQL, db string) *BindRecord { - bindRecords := c[hash] - for _, bindRecord := range bindRecords { - if bindRecord.OriginalSQL == normdOrigSQL { - return bindRecord +func (h *BindHandle) extractCaptureFilterFromStorage() (filter *captureFilter) { + filter = &captureFilter{ + frequency: 1, + users: make(map[string]struct{}), + } + exec := h.sctx.Context.(sqlexec.RestrictedSQLExecutor) + // No need to acquire the session context lock for ExecRestrictedSQL, it + // uses another background session. + rows, _, err := exec.ExecRestrictedSQL(context.TODO(), nil, `SELECT filter_type, filter_value FROM mysql.capture_plan_baselines_blacklist order by filter_type`) + if err != nil { + logutil.BgLogger().Warn("[sql-bind] failed to load mysql.capture_plan_baselines_blacklist", zap.Error(err)) + return + } + for _, row := range rows { + filterTp := strings.ToLower(row.GetString(0)) + valStr := strings.ToLower(row.GetString(1)) + switch filterTp { + case "table": + tfilter, valid := ParseCaptureTableFilter(valStr) + if !valid { + logutil.BgLogger().Warn("[sql-bind] capture table filter is invalid, ignore it", zap.String("filter_value", valStr)) + continue + } + filter.tables = append(filter.tables, tfilter) + case "user": + filter.users[valStr] = struct{}{} + case "frequency": + f, err := strconv.ParseInt(valStr, 10, 64) + if err != nil { + logutil.BgLogger().Warn("[sql-bind] failed to parse frequency type value, ignore it", zap.String("filter_value", valStr), zap.Error(err)) + continue + } + if f < 1 { + logutil.BgLogger().Warn("[sql-bind] frequency threshold is less than 1, ignore it", zap.Int64("frequency", f)) + continue + } + if f > filter.frequency { + filter.frequency = f + } + default: + logutil.BgLogger().Warn("[sql-bind] unknown capture filter type, 
ignore it", zap.String("filter_type", filterTp)) } } - return nil + return } // CaptureBaselines is used to automatically capture plan baselines. func (h *BindHandle) CaptureBaselines() { parser4Capture := parser.New() - bindableStmts := stmtsummary.StmtSummaryByDigestMap.GetMoreThanOnceBindableStmt() + captureFilter := h.extractCaptureFilterFromStorage() + emptyCaptureFilter := captureFilter.isEmpty() + bindableStmts := stmtsummary.StmtSummaryByDigestMap.GetMoreThanCntBindableStmt(captureFilter.frequency) for _, bindableStmt := range bindableStmts { stmt, err := parser4Capture.ParseOneStmt(bindableStmt.Query, bindableStmt.Charset, bindableStmt.Collation) if err != nil { @@ -622,6 +721,27 @@ func (h *BindHandle) CaptureBaselines() { if insertStmt, ok := stmt.(*ast.InsertStmt); ok && insertStmt.Select == nil { continue } + if !emptyCaptureFilter { + captureFilter.fail = false + captureFilter.currentDB = bindableStmt.Schema + stmt.Accept(captureFilter) + if captureFilter.fail { + continue + } + + if len(captureFilter.users) > 0 { + filteredByUser := true + for user := range bindableStmt.Users { + if _, ok := captureFilter.users[user]; !ok { + filteredByUser = false // some user not in the black-list has processed this stmt + break + } + } + if filteredByUser { + continue + } + } + } dbName := utilparser.GetDefaultDB(stmt, bindableStmt.Schema) normalizedSQL, digest := parser.NormalizeDigest(utilparser.RestoreWithDefaultDB(stmt, dbName, bindableStmt.Query)) if r := h.GetBindRecord(digest.String(), normalizedSQL, dbName); r != nil && r.HasUsingBinding() { @@ -634,7 +754,7 @@ func (h *BindHandle) CaptureBaselines() { charset, collation := h.sctx.GetSessionVars().GetCharsetInfo() binding := Binding{ BindSQL: bindSQL, - Status: Using, + Status: Enabled, Charset: charset, Collation: collation, Source: Capture, @@ -657,12 +777,18 @@ func getHintsForSQL(sctx sessionctx.Context, sql string) (string, error) { rs, err := sctx.(sqlexec.SQLExecutor).ExecuteInternal(context.TODO(), 
fmt.Sprintf("EXPLAIN FORMAT='hint' %s", sql)) sctx.GetSessionVars().UsePlanBaselines = origVals if rs != nil { - defer terror.Call(rs.Close) + defer func() { + // Audit log is collected in Close(), set InRestrictedSQL to avoid 'create sql binding' been recorded as 'explain'. + origin := sctx.GetSessionVars().InRestrictedSQL + sctx.GetSessionVars().InRestrictedSQL = true + terror.Call(rs.Close) + sctx.GetSessionVars().InRestrictedSQL = origin + }() } if err != nil { return "", err } - chk := rs.NewChunk() + chk := rs.NewChunk(nil) err = rs.Next(context.TODO(), chk) if err != nil { return "", err @@ -704,7 +830,22 @@ func GenerateBindSQL(ctx context.Context, stmtNode ast.StmtNode, planHint string bindSQL = bindSQL[updateIdx:] return strings.Replace(bindSQL, "UPDATE", fmt.Sprintf("UPDATE /*+ %s*/", planHint), 1) case *ast.SelectStmt: - selectIdx := strings.Index(bindSQL, "SELECT") + var selectIdx int + if n.With != nil { + var withSb strings.Builder + withIdx := strings.Index(bindSQL, "WITH") + restoreCtx := format.NewRestoreCtx(format.RestoreStringSingleQuotes|format.RestoreSpacesAroundBinaryOperation|format.RestoreStringWithoutCharset|format.RestoreNameBackQuotes, &withSb) + restoreCtx.DefaultDB = defaultDB + err := n.With.Restore(restoreCtx) + if err != nil { + logutil.BgLogger().Debug("[sql-bind] restore SQL failed", zap.Error(err)) + return "" + } + withEnd := withIdx + len(withSb.String()) + tmp := strings.Replace(bindSQL[withEnd:], "SELECT", fmt.Sprintf("SELECT /*+ %s*/", planHint), 1) + return strings.Join([]string{bindSQL[withIdx:withEnd], tmp}, "") + } + selectIdx = strings.Index(bindSQL, "SELECT") // Remove possible `explain` prefix. 
bindSQL = bindSQL[selectIdx:] return strings.Replace(bindSQL, "SELECT", fmt.Sprintf("SELECT /*+ %s*/", planHint), 1) @@ -755,8 +896,9 @@ func (h *BindHandle) SaveEvolveTasksToStore() { } func getEvolveParameters(ctx sessionctx.Context) (time.Duration, time.Time, time.Time, error) { - stmt, err := ctx.(sqlexec.RestrictedSQLExecutor).ParseWithParams( + rows, _, err := ctx.(sqlexec.RestrictedSQLExecutor).ExecRestrictedSQL( context.TODO(), + nil, "SELECT variable_name, variable_value FROM mysql.global_variables WHERE variable_name IN (%?, %?, %?)", variable.TiDBEvolvePlanTaskMaxTime, variable.TiDBEvolvePlanTaskStartTime, @@ -765,10 +907,6 @@ func getEvolveParameters(ctx sessionctx.Context) (time.Duration, time.Time, time if err != nil { return 0, time.Time{}, time.Time{}, err } - rows, _, err := ctx.(sqlexec.RestrictedSQLExecutor).ExecRestrictedStmt(context.TODO(), stmt) - if err != nil { - return 0, time.Time{}, time.Time{}, err - } maxTime, startTimeStr, endTimeStr := int64(variable.DefTiDBEvolvePlanTaskMaxTime), variable.DefTiDBEvolvePlanTaskStartTime, variable.DefAutoAnalyzeEndTime for _, row := range rows { switch row.GetString(0) { @@ -808,25 +946,23 @@ const ( ) func (h *BindHandle) getOnePendingVerifyJob() (string, string, Binding) { - cache := h.bindInfo.Value.Load().(cache) - for _, bindRecords := range cache { - for _, bindRecord := range bindRecords { - for _, bind := range bindRecord.Bindings { - if bind.Status == PendingVerify { - return bindRecord.OriginalSQL, bindRecord.Db, bind - } - if bind.Status != Rejected { - continue - } - dur, err := bind.SinceUpdateTime() - // Should not happen. - if err != nil { - continue - } - // Rejected and retry it now. 
- if dur > nextVerifyDuration { - return bindRecord.OriginalSQL, bindRecord.Db, bind - } + cache := h.bindInfo.Value.Load().(*bindCache) + for _, bindRecord := range cache.GetAllBindRecords() { + for _, bind := range bindRecord.Bindings { + if bind.Status == PendingVerify { + return bindRecord.OriginalSQL, bindRecord.Db, bind + } + if bind.Status != Rejected { + continue + } + dur, err := bind.SinceUpdateTime() + // Should not happen. + if err != nil { + continue + } + // Rejected and retry it now. + if dur > nextVerifyDuration { + return bindRecord.OriginalSQL, bindRecord.Db, bind } } } @@ -878,7 +1014,7 @@ func runSQL(ctx context.Context, sctx sessionctx.Context, sql string, resultChan resultChan <- err return } - chk := rs.NewChunk() + chk := rs.NewChunk(nil) for { err = rs.Next(ctx, chk) if err != nil || chk.NumRows() == 0 { @@ -890,7 +1026,7 @@ func runSQL(ctx context.Context, sctx sessionctx.Context, sql string, resultChan } // HandleEvolvePlanTask tries to evolve one plan task. -// It only handle one tasks once because we want each task could use the latest parameters. +// It only processes one task at a time because we want each task to use the latest parameters. func (h *BindHandle) HandleEvolvePlanTask(sctx sessionctx.Context, adminEvolve bool) error { originalSQL, db, binding := h.getOnePendingVerifyJob() if originalSQL == "" { @@ -930,7 +1066,7 @@ func (h *BindHandle) HandleEvolvePlanTask(sctx sessionctx.Context, adminEvolve b zap.String("digestText", digestText), ) } else { - binding.Status = Using + binding.Status = Enabled } // We don't need to pass the `sctx` because the BindSQL has been validated already. return h.AddBindRecord(nil, &BindRecord{OriginalSQL: originalSQL, Db: db, Bindings: []Binding{binding}}) @@ -939,7 +1075,7 @@ func (h *BindHandle) HandleEvolvePlanTask(sctx sessionctx.Context, adminEvolve b // Clear resets the bind handle. It is only used for test. 
func (h *BindHandle) Clear() { h.bindInfo.Lock() - h.bindInfo.Store(make(cache)) + h.bindInfo.Store(newBindCache()) h.bindInfo.lastUpdateTime = types.ZeroTimestamp h.bindInfo.Unlock() h.invalidBindRecordMap.Store(make(map[string]*bindRecordUpdate)) @@ -957,7 +1093,7 @@ func (h *BindHandle) FlushBindings() error { // It is used to maintain consistency between cache and mysql.bind_info if the table is deleted or truncated. func (h *BindHandle) ReloadBindings() error { h.bindInfo.Lock() - h.bindInfo.Store(make(cache)) + h.bindInfo.Store(newBindCache()) h.bindInfo.lastUpdateTime = types.ZeroTimestamp h.bindInfo.Unlock() return h.Update(true) diff --git a/bindinfo/handle_test.go b/bindinfo/handle_test.go new file mode 100644 index 0000000000000..085bdcbadaeb5 --- /dev/null +++ b/bindinfo/handle_test.go @@ -0,0 +1,524 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bindinfo_test + +import ( + "context" + "fmt" + "testing" + + "github.com/pingcap/tidb/bindinfo" + "github.com/pingcap/tidb/config" + "github.com/pingcap/tidb/domain" + "github.com/pingcap/tidb/metrics" + "github.com/pingcap/tidb/parser" + "github.com/pingcap/tidb/testkit" + utilparser "github.com/pingcap/tidb/util/parser" + dto "github.com/prometheus/client_model/go" + "github.com/stretchr/testify/require" +) + +func utilCleanBindingEnv(tk *testkit.TestKit, dom *domain.Domain) { + tk.MustExec("delete from mysql.bind_info where source != 'builtin'") + dom.BindHandle().Clear() +} + +func utilNormalizeWithDefaultDB(t *testing.T, sql, db string) (string, string) { + testParser := parser.New() + stmt, err := testParser.ParseOneStmt(sql, "", "") + require.NoError(t, err) + normalized, digest := parser.NormalizeDigest(utilparser.RestoreWithDefaultDB(stmt, "test", "")) + return normalized, digest.String() +} + +func TestBindingCache(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec("create global binding for select * from t using select * from t use index(idx);") + tk.MustExec("create database tmp") + tk.MustExec("use tmp") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec("create global binding for select * from t using select * from t use index(idx);") + + require.Nil(t, dom.BindHandle().Update(false)) + require.Nil(t, dom.BindHandle().Update(false)) + res := tk.MustQuery("show global bindings") + require.Equal(t, 2, len(res.Rows())) + + tk.MustExec("drop global binding for select * from t;") + require.Nil(t, dom.BindHandle().Update(false)) + require.Equal(t, 1, len(dom.BindHandle().GetAllBindRecord())) +} + +func TestBindingLastUpdateTime(t *testing.T) { + store, _, clean := testkit.CreateMockStoreAndDomain(t) + 
defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t0;") + tk.MustExec("create table t0(a int, key(a));") + tk.MustExec("create global binding for select * from t0 using select * from t0 use index(a);") + tk.MustExec("admin reload bindings;") + + bindHandle := bindinfo.NewBindHandle(tk.Session()) + err := bindHandle.Update(true) + require.NoError(t, err) + sql, hash := parser.NormalizeDigest("select * from test . t0") + bindData := bindHandle.GetBindRecord(hash.String(), sql, "test") + require.Equal(t, 1, len(bindData.Bindings)) + bind := bindData.Bindings[0] + updateTime := bind.UpdateTime.String() + + rows1 := tk.MustQuery("show status like 'last_plan_binding_update_time';").Rows() + updateTime1 := rows1[0][1] + require.Equal(t, updateTime, updateTime1) + + rows2 := tk.MustQuery("show session status like 'last_plan_binding_update_time';").Rows() + updateTime2 := rows2[0][1] + require.Equal(t, updateTime, updateTime2) + tk.MustQuery(`show global status like 'last_plan_binding_update_time';`).Check(testkit.Rows()) +} + +func TestBindingLastUpdateTimeWithInvalidBind(t *testing.T) { + store, _, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + rows0 := tk.MustQuery("show status like 'last_plan_binding_update_time';").Rows() + updateTime0 := rows0[0][1] + require.Equal(t, updateTime0, "0000-00-00 00:00:00") + + tk.MustExec("insert into mysql.bind_info values('select * from `test` . `t`', 'select * from `test` . 
`t` use index(`idx`)', 'test', 'enabled', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int)") + tk.MustExec("admin reload bindings;") + + rows1 := tk.MustQuery("show status like 'last_plan_binding_update_time';").Rows() + updateTime1 := rows1[0][1] + require.Equal(t, updateTime1, "2000-01-01 09:00:00.000") + + rows2 := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows2, 0) +} + +func TestBindParse(t *testing.T) { + store, _, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("create table t(i int)") + tk.MustExec("create index index_t on t(i)") + + originSQL := "select * from `test` . `t`" + bindSQL := "select * from `test` . `t` use index(index_t)" + defaultDb := "test" + status := bindinfo.Enabled + charset := "utf8mb4" + collation := "utf8mb4_bin" + source := bindinfo.Manual + sql := fmt.Sprintf(`INSERT INTO mysql.bind_info(original_sql,bind_sql,default_db,status,create_time,update_time,charset,collation,source) VALUES ('%s', '%s', '%s', '%s', NOW(), NOW(),'%s', '%s', '%s')`, + originSQL, bindSQL, defaultDb, status, charset, collation, source) + tk.MustExec(sql) + bindHandle := bindinfo.NewBindHandle(tk.Session()) + err := bindHandle.Update(true) + require.NoError(t, err) + require.Equal(t, 1, bindHandle.Size()) + + sql, hash := parser.NormalizeDigest("select * from test . t") + bindData := bindHandle.GetBindRecord(hash.String(), sql, "test") + require.NotNil(t, bindData) + require.Equal(t, "select * from `test` . `t`", bindData.OriginalSQL) + bind := bindData.Bindings[0] + require.Equal(t, "select * from `test` . 
`t` use index(index_t)", bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) + require.Equal(t, "utf8mb4", bind.Charset) + require.Equal(t, "utf8mb4_bin", bind.Collation) + require.NotNil(t, bind.CreateTime) + require.NotNil(t, bind.UpdateTime) + dur, err := bind.SinceUpdateTime() + require.NoError(t, err) + require.GreaterOrEqual(t, int64(dur), int64(0)) + + // Test fields with quotes or slashes. + sql = `CREATE GLOBAL BINDING FOR select * from t where i BETWEEN "a" and "b" USING select * from t use index(index_t) where i BETWEEN "a\nb\rc\td\0e" and 'x'` + tk.MustExec(sql) + tk.MustExec(`DROP global binding for select * from t use index(idx) where i BETWEEN "a\nb\rc\td\0e" and "x"`) + + // Test SetOprStmt. + tk.MustExec(`create binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()`) + tk.MustExec(`drop binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()`) + tk.MustExec(`create binding for select * from t INTERSECT select * from t using select * from t use index(index_t) INTERSECT select * from t use index()`) + tk.MustExec(`drop binding for select * from t INTERSECT select * from t using select * from t use index(index_t) INTERSECT select * from t use index()`) + tk.MustExec(`create binding for select * from t EXCEPT select * from t using select * from t use index(index_t) EXCEPT select * from t use index()`) + tk.MustExec(`drop binding for select * from t EXCEPT select * from t using select * from t use index(index_t) EXCEPT select * from t use index()`) + tk.MustExec(`create binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * from t use index())`) + tk.MustExec(`drop binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * 
from t use index())`) + + // Test Update / Delete. + tk.MustExec("create table t1(a int, b int, c int, key(b), key(c))") + tk.MustExec("create table t2(a int, b int, c int, key(b), key(c))") + tk.MustExec("create binding for delete from t1 where b = 1 and c > 1 using delete /*+ use_index(t1, c) */ from t1 where b = 1 and c > 1") + tk.MustExec("drop binding for delete from t1 where b = 1 and c > 1 using delete /*+ use_index(t1, c) */ from t1 where b = 1 and c > 1") + tk.MustExec("create binding for delete t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1 using delete /*+ hash_join(t1, t2), use_index(t1, c) */ t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1") + tk.MustExec("drop binding for delete t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1 using delete /*+ hash_join(t1, t2), use_index(t1, c) */ t1, t2 from t1 inner join t2 on t1.b = t2.b where t1.c = 1") + tk.MustExec("create binding for update t1 set a = 1 where b = 1 and c > 1 using update /*+ use_index(t1, c) */ t1 set a = 1 where b = 1 and c > 1") + tk.MustExec("drop binding for update t1 set a = 1 where b = 1 and c > 1 using update /*+ use_index(t1, c) */ t1 set a = 1 where b = 1 and c > 1") + tk.MustExec("create binding for update t1, t2 set t1.a = 1 where t1.b = t2.b using update /*+ inl_join(t1) */ t1, t2 set t1.a = 1 where t1.b = t2.b") + tk.MustExec("drop binding for update t1, t2 set t1.a = 1 where t1.b = t2.b using update /*+ inl_join(t1) */ t1, t2 set t1.a = 1 where t1.b = t2.b") + // Test Insert / Replace. 
+ tk.MustExec("create binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") + tk.MustExec("drop binding for insert into t1 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") + tk.MustExec("create binding for replace into t1 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") + tk.MustExec("drop binding for replace into t1 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t1 select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1") + err = tk.ExecToErr("create binding for insert into t1 values(1,1,1) using insert into t1 values(1,1,1)") + require.Equal(t, "create binding only supports INSERT / REPLACE INTO SELECT", err.Error()) + err = tk.ExecToErr("create binding for replace into t1 values(1,1,1) using replace into t1 values(1,1,1)") + require.Equal(t, "create binding only supports INSERT / REPLACE INTO SELECT", err.Error()) + + // Test errors. 
+ tk.MustExec(`drop table if exists t1`) + tk.MustExec("create table t1(i int, s varchar(20))") + _, err = tk.Exec("create global binding for select * from t using select * from t1 use index for join(index_t)") + require.NotNil(t, err, "err %v", err) +} + +func TestEvolveInvalidBindings(t *testing.T) { + originalVal := config.CheckTableBeforeDrop + config.CheckTableBeforeDrop = true + defer func() { + config.CheckTableBeforeDrop = originalVal + }() + + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idx_a(a))") + tk.MustExec("create global binding for select * from t where a > 10 using select /*+ USE_INDEX(t) */ * from t where a > 10") + // Manufacture a rejected binding by hacking mysql.bind_info. + tk.MustExec("insert into mysql.bind_info values('select * from test . t where a > ?', 'SELECT /*+ USE_INDEX(t,idx_a) */ * FROM test.t WHERE a > 10', 'test', 'rejected', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustQuery("select bind_sql, status from mysql.bind_info where source != 'builtin'").Sort().Check(testkit.Rows( + "SELECT /*+ USE_INDEX(`t` )*/ * FROM `test`.`t` WHERE `a` > 10 enabled", + "SELECT /*+ USE_INDEX(t,idx_a) */ * FROM test.t WHERE a > 10 rejected", + )) + // Reload cache from mysql.bind_info. + dom.BindHandle().Clear() + require.Nil(t, dom.BindHandle().Update(true)) + + tk.MustExec("alter table t drop index idx_a") + tk.MustExec("admin evolve bindings") + require.Nil(t, dom.BindHandle().Update(false)) + rows := tk.MustQuery("show global bindings").Sort().Rows() + require.Equal(t, 2, len(rows)) + // Make sure this "enabled" binding is not overrided. 
+ require.Equal(t, "SELECT /*+ USE_INDEX(`t` )*/ * FROM `test`.`t` WHERE `a` > 10", rows[0][1]) + status := rows[0][3].(string) + require.True(t, status == bindinfo.Enabled) + require.Equal(t, "SELECT /*+ USE_INDEX(t,idx_a) */ * FROM test.t WHERE a > 10", rows[1][1]) + status = rows[1][3].(string) + require.True(t, status == bindinfo.Enabled || status == bindinfo.Rejected) +} + +var testSQLs = []struct { + createSQL string + overlaySQL string + querySQL string + originSQL string + bindSQL string + dropSQL string + memoryUsage float64 +}{ + { + createSQL: "binding for select * from t where i>100 using select * from t use index(index_t) where i>100", + overlaySQL: "binding for select * from t where i>99 using select * from t use index(index_t) where i>99", + querySQL: "select * from t where i > 30.0", + originSQL: "select * from `test` . `t` where `i` > ?", + bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) WHERE `i` > 99", + dropSQL: "binding for select * from t where i>100", + memoryUsage: float64(167), + }, + { + createSQL: "binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()", + overlaySQL: "", + querySQL: "select * from t union all select * from t", + originSQL: "select * from `test` . `t` union all select * from `test` . `t`", + bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) UNION ALL SELECT * FROM `test`.`t` USE INDEX ()", + dropSQL: "binding for select * from t union all select * from t", + memoryUsage: float64(237), + }, + { + createSQL: "binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * from t use index())", + overlaySQL: "", + querySQL: "(select * from t) union all (select * from t)", + originSQL: "( select * from `test` . `t` ) union all ( select * from `test` . 
`t` )", + bindSQL: "(SELECT * FROM `test`.`t` USE INDEX (`index_t`)) UNION ALL (SELECT * FROM `test`.`t` USE INDEX ())", + dropSQL: "binding for (select * from t) union all (select * from t)", + memoryUsage: float64(249), + }, + { + createSQL: "binding for select * from t intersect select * from t using select * from t use index(index_t) intersect select * from t use index()", + overlaySQL: "", + querySQL: "select * from t intersect select * from t", + originSQL: "select * from `test` . `t` intersect select * from `test` . `t`", + bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) INTERSECT SELECT * FROM `test`.`t` USE INDEX ()", + dropSQL: "binding for select * from t intersect select * from t", + memoryUsage: float64(237), + }, + { + createSQL: "binding for select * from t except select * from t using select * from t use index(index_t) except select * from t use index()", + overlaySQL: "", + querySQL: "select * from t except select * from t", + originSQL: "select * from `test` . `t` except select * from `test` . `t`", + bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) EXCEPT SELECT * FROM `test`.`t` USE INDEX ()", + dropSQL: "binding for select * from t except select * from t", + memoryUsage: float64(231), + }, + { + createSQL: "binding for select * from t using select /*+ use_index(t,index_t)*/ * from t", + overlaySQL: "", + querySQL: "select * from t ", + originSQL: "select * from `test` . `t`", + bindSQL: "SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t`", + dropSQL: "binding for select * from t", + memoryUsage: float64(166), + }, + { + createSQL: "binding for delete from t where i = 1 using delete /*+ use_index(t,index_t) */ from t where i = 1", + overlaySQL: "", + querySQL: "delete from t where i = 2", + originSQL: "delete from `test` . 
`t` where `i` = ?", + bindSQL: "DELETE /*+ use_index(`t` `index_t`)*/ FROM `test`.`t` WHERE `i` = 1", + dropSQL: "binding for delete from t where i = 1", + memoryUsage: float64(190), + }, + { + createSQL: "binding for delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 1 using delete /*+ use_index(t,index_t), hash_join(t,t1) */ t, t1 from t inner join t1 on t.s = t1.s where t.i = 1", + overlaySQL: "", + querySQL: "delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 2", + originSQL: "delete `test` . `t` , `test` . `t1` from `test` . `t` join `test` . `t1` on `t` . `s` = `t1` . `s` where `t` . `i` = ?", + bindSQL: "DELETE /*+ use_index(`t` `index_t`) hash_join(`t`, `t1`)*/ `test`.`t`,`test`.`t1` FROM `test`.`t` JOIN `test`.`t1` ON `t`.`s` = `t1`.`s` WHERE `t`.`i` = 1", + dropSQL: "binding for delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 1", + memoryUsage: float64(402), + }, + { + createSQL: "binding for update t set s = 'a' where i = 1 using update /*+ use_index(t,index_t) */ t set s = 'a' where i = 1", + overlaySQL: "", + querySQL: "update t set s='b' where i=2", + originSQL: "update `test` . `t` set `s` = ? where `i` = ?", + bindSQL: "UPDATE /*+ use_index(`t` `index_t`)*/ `test`.`t` SET `s`='a' WHERE `i` = 1", + dropSQL: "binding for update t set s = 'a' where i = 1", + memoryUsage: float64(204), + }, + { + createSQL: "binding for update t, t1 set t.s = 'a' where t.i = t1.i using update /*+ inl_join(t1) */ t, t1 set t.s = 'a' where t.i = t1.i", + overlaySQL: "", + querySQL: "update t , t1 set t.s='b' where t.i=t1.i", + originSQL: "update ( `test` . `t` ) join `test` . `t1` set `t` . `s` = ? where `t` . `i` = `t1` . 
`i`", + bindSQL: "UPDATE /*+ inl_join(`t1`)*/ (`test`.`t`) JOIN `test`.`t1` SET `t`.`s`='a' WHERE `t`.`i` = `t1`.`i`", + dropSQL: "binding for update t, t1 set t.s = 'a' where t.i = t1.i", + memoryUsage: float64(262), + }, + { + createSQL: "binding for insert into t1 select * from t where t.i = 1 using insert into t1 select /*+ use_index(t,index_t) */ * from t where t.i = 1", + overlaySQL: "", + querySQL: "insert into t1 select * from t where t.i = 2", + originSQL: "insert into `test` . `t1` select * from `test` . `t` where `t` . `i` = ?", + bindSQL: "INSERT INTO `test`.`t1` SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t` WHERE `t`.`i` = 1", + dropSQL: "binding for insert into t1 select * from t where t.i = 1", + memoryUsage: float64(254), + }, + { + createSQL: "binding for replace into t1 select * from t where t.i = 1 using replace into t1 select /*+ use_index(t,index_t) */ * from t where t.i = 1", + overlaySQL: "", + querySQL: "replace into t1 select * from t where t.i = 2", + originSQL: "replace into `test` . `t1` select * from `test` . `t` where `t` . 
`i` = ?", + bindSQL: "REPLACE INTO `test`.`t1` SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t` WHERE `t`.`i` = 1", + dropSQL: "binding for replace into t1 select * from t where t.i = 1", + memoryUsage: float64(256), + }, +} + +func TestGlobalBinding(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + for _, testSQL := range testSQLs { + utilCleanBindingEnv(tk, dom) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("drop table if exists t1") + tk.MustExec("create table t(i int, s varchar(20))") + tk.MustExec("create table t1(i int, s varchar(20))") + tk.MustExec("create index index_t on t(i,s)") + + metrics.BindTotalGauge.Reset() + metrics.BindMemoryUsage.Reset() + + _, err := tk.Exec("create global " + testSQL.createSQL) + require.NoError(t, err, "err %v", err) + + if testSQL.overlaySQL != "" { + _, err = tk.Exec("create global " + testSQL.overlaySQL) + require.NoError(t, err) + } + + pb := &dto.Metric{} + err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeGlobal, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, float64(1), pb.GetGauge().GetValue()) + err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeGlobal, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, testSQL.memoryUsage, pb.GetGauge().GetValue()) + + sql, hash := utilNormalizeWithDefaultDB(t, testSQL.querySQL, "test") + + bindData := dom.BindHandle().GetBindRecord(hash, sql, "test") + require.NotNil(t, bindData) + require.Equal(t, testSQL.originSQL, bindData.OriginalSQL) + bind := bindData.Bindings[0] + require.Equal(t, testSQL.bindSQL, bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) + require.NotNil(t, bind.Charset) + require.NotNil(t, bind.Collation) + require.NotNil(t, bind.CreateTime) + require.NotNil(t, bind.UpdateTime) + + rs, err := tk.Exec("show global bindings") + 
require.NoError(t, err) + chk := rs.NewChunk(nil) + err = rs.Next(context.TODO(), chk) + require.NoError(t, err) + require.Equal(t, 1, chk.NumRows()) + row := chk.GetRow(0) + require.Equal(t, testSQL.originSQL, row.GetString(0)) + require.Equal(t, testSQL.bindSQL, row.GetString(1)) + require.Equal(t, "test", row.GetString(2)) + require.Equal(t, bindinfo.Enabled, row.GetString(3)) + require.NotNil(t, row.GetTime(4)) + require.NotNil(t, row.GetTime(5)) + require.NotNil(t, row.GetString(6)) + require.NotNil(t, row.GetString(7)) + + bindHandle := bindinfo.NewBindHandle(tk.Session()) + err = bindHandle.Update(true) + require.NoError(t, err) + require.Equal(t, 1, bindHandle.Size()) + + bindData = bindHandle.GetBindRecord(hash, sql, "test") + require.NotNil(t, bindData) + require.Equal(t, testSQL.originSQL, bindData.OriginalSQL) + bind = bindData.Bindings[0] + require.Equal(t, testSQL.bindSQL, bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) + require.NotNil(t, bind.Charset) + require.NotNil(t, bind.Collation) + require.NotNil(t, bind.CreateTime) + require.NotNil(t, bind.UpdateTime) + + _, err = tk.Exec("drop global " + testSQL.dropSQL) + require.NoError(t, err) + bindData = dom.BindHandle().GetBindRecord(hash, sql, "test") + require.Nil(t, bindData) + + err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeGlobal, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, float64(0), pb.GetGauge().GetValue()) + err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeGlobal, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + // From newly created global bind handle. 
+ require.Equal(t, testSQL.memoryUsage, pb.GetGauge().GetValue()) + + bindHandle = bindinfo.NewBindHandle(tk.Session()) + err = bindHandle.Update(true) + require.NoError(t, err) + require.Equal(t, 0, bindHandle.Size()) + + bindData = bindHandle.GetBindRecord(hash, sql, "test") + require.Nil(t, bindData) + + rs, err = tk.Exec("show global bindings") + require.NoError(t, err) + chk = rs.NewChunk(nil) + err = rs.Next(context.TODO(), chk) + require.NoError(t, err) + require.Equal(t, 0, chk.NumRows()) + + _, err = tk.Exec("delete from mysql.bind_info where source != 'builtin'") + require.NoError(t, err) + } +} + +func TestOutdatedInfoSchema(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec("create global binding for select * from t using select * from t use index(idx)") + require.Nil(t, dom.BindHandle().Update(false)) + utilCleanBindingEnv(tk, dom) + tk.MustExec("create global binding for select * from t using select * from t use index(idx)") +} + +func TestReloadBindings(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec("create global binding for select * from t using select * from t use index(idx)") + rows := tk.MustQuery("show global bindings").Rows() + require.Equal(t, 1, len(rows)) + rows = tk.MustQuery("select * from mysql.bind_info where source != 'builtin'").Rows() + require.Equal(t, 1, len(rows)) + tk.MustExec("delete from mysql.bind_info where source != 'builtin'") + require.Nil(t, dom.BindHandle().Update(false)) + rows = tk.MustQuery("show global bindings").Rows() + require.Equal(t, 1, len(rows)) + require.Nil(t, 
dom.BindHandle().Update(true)) + rows = tk.MustQuery("show global bindings").Rows() + require.Equal(t, 1, len(rows)) + tk.MustExec("admin reload bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Equal(t, 0, len(rows)) +} diff --git a/bindinfo/main_test.go b/bindinfo/main_test.go new file mode 100644 index 0000000000000..a2e209ec2dd0b --- /dev/null +++ b/bindinfo/main_test.go @@ -0,0 +1,31 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bindinfo_test + +import ( + "testing" + + "github.com/pingcap/tidb/util/testbridge" + "go.uber.org/goleak" +) + +func TestMain(m *testing.M) { + testbridge.SetupForCommonTest() + opts := []goleak.Option{ + goleak.IgnoreTopFunction("go.etcd.io/etcd/client/pkg/v3/logutil.(*MergeLogger).outputLoop"), + goleak.IgnoreTopFunction("go.opencensus.io/stats/view.(*worker).start"), + } + goleak.VerifyTestMain(m, opts...) +} diff --git a/bindinfo/optimize_test.go b/bindinfo/optimize_test.go new file mode 100644 index 0000000000000..228a44ab93693 --- /dev/null +++ b/bindinfo/optimize_test.go @@ -0,0 +1,39 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bindinfo_test + +import ( + "testing" + + "github.com/pingcap/failpoint" + "github.com/pingcap/tidb/testkit" + "github.com/stretchr/testify/require" +) + +func TestOptimizeOnlyOnce(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idxa(a))") + tk.MustExec("create global binding for select * from t using select * from t use index(idxa)") + require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/planner/checkOptimizeCountOne", "return")) + defer func() { + require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/planner/checkOptimizeCountOne")) + }() + tk.MustQuery("select * from t").Check(testkit.Rows()) +} diff --git a/bindinfo/session_handle.go b/bindinfo/session_handle.go index 6b54aa9118f77..4256e77851292 100644 --- a/bindinfo/session_handle.go +++ b/bindinfo/session_handle.go @@ -8,6 +8,7 @@ // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
@@ -17,31 +18,31 @@ import ( "strings" "time" - "github.com/pingcap/parser" - "github.com/pingcap/parser/mysql" "github.com/pingcap/tidb/metrics" + "github.com/pingcap/tidb/parser" + "github.com/pingcap/tidb/parser/mysql" "github.com/pingcap/tidb/sessionctx" "github.com/pingcap/tidb/types" ) // SessionHandle is used to handle all session sql bind operations. type SessionHandle struct { - ch cache + ch *bindCache parser *parser.Parser } // NewSessionBindHandle creates a new SessionBindHandle. func NewSessionBindHandle(parser *parser.Parser) *SessionHandle { sessionHandle := &SessionHandle{parser: parser} - sessionHandle.ch = make(cache) + sessionHandle.ch = newBindCache() return sessionHandle } // appendBindRecord adds the BindRecord to the cache, all the stale bindMetas are // removed from the cache after this operation. func (h *SessionHandle) appendBindRecord(hash string, meta *BindRecord) { - oldRecord := h.ch.getBindRecord(hash, meta.OriginalSQL, meta.Db) - h.ch.setBindRecord(hash, meta) + oldRecord := h.ch.GetBindRecord(hash, meta.OriginalSQL, meta.Db) + h.ch.SetBindRecord(hash, meta) updateMetrics(metrics.ScopeSession, oldRecord, meta, false) } @@ -67,7 +68,8 @@ func (h *SessionHandle) CreateBindRecord(sctx sessionctx.Context, record *BindRe // DropBindRecord drops a BindRecord in the cache. 
func (h *SessionHandle) DropBindRecord(originalSQL, db string, binding *Binding) error { db = strings.ToLower(db) - oldRecord := h.GetBindRecord(originalSQL, db) + hash := parser.DigestNormalized(originalSQL).String() + oldRecord := h.GetBindRecord(hash, originalSQL, db) var newRecord *BindRecord record := &BindRecord{OriginalSQL: originalSQL, Db: db} if binding != nil { @@ -78,37 +80,25 @@ func (h *SessionHandle) DropBindRecord(originalSQL, db string, binding *Binding) } else { newRecord = record } - h.ch.setBindRecord(parser.DigestNormalized(record.OriginalSQL).String(), newRecord) + h.ch.SetBindRecord(hash, newRecord) updateMetrics(metrics.ScopeSession, oldRecord, newRecord, false) return nil } // GetBindRecord return the BindMeta of the (normdOrigSQL,db) if BindMeta exist. -func (h *SessionHandle) GetBindRecord(normdOrigSQL, db string) *BindRecord { - hash := parser.DigestNormalized(normdOrigSQL).String() - bindRecords := h.ch[hash] - for _, bindRecord := range bindRecords { - if bindRecord.OriginalSQL == normdOrigSQL { - return bindRecord - } - } - return nil +func (h *SessionHandle) GetBindRecord(hash, normdOrigSQL, db string) *BindRecord { + return h.ch.GetBindRecord(hash, normdOrigSQL, db) } // GetAllBindRecord return all session bind info. func (h *SessionHandle) GetAllBindRecord() (bindRecords []*BindRecord) { - for _, bindRecord := range h.ch { - bindRecords = append(bindRecords, bindRecord...) - } - return bindRecords + return h.ch.GetAllBindRecords() } // Close closes the session handle. 
func (h *SessionHandle) Close() { - for _, bindRecords := range h.ch { - for _, bindRecord := range bindRecords { - updateMetrics(metrics.ScopeSession, bindRecord, nil, false) - } + for _, bindRecord := range h.ch.GetAllBindRecords() { + updateMetrics(metrics.ScopeSession, bindRecord, nil, false) } } diff --git a/bindinfo/session_handle_test.go b/bindinfo/session_handle_test.go new file mode 100644 index 0000000000000..528af7a5705e7 --- /dev/null +++ b/bindinfo/session_handle_test.go @@ -0,0 +1,580 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bindinfo_test + +import ( + "context" + "crypto/tls" + "strconv" + "testing" + "time" + + "github.com/pingcap/tidb/bindinfo" + "github.com/pingcap/tidb/errno" + "github.com/pingcap/tidb/metrics" + "github.com/pingcap/tidb/parser" + "github.com/pingcap/tidb/parser/auth" + plannercore "github.com/pingcap/tidb/planner/core" + "github.com/pingcap/tidb/session/txninfo" + "github.com/pingcap/tidb/testkit" + "github.com/pingcap/tidb/util" + "github.com/pingcap/tidb/util/stmtsummary" + dto "github.com/prometheus/client_model/go" + "github.com/stretchr/testify/require" +) + +func TestGlobalAndSessionBindingBothExist(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + tk.MustExec("use test") + tk.MustExec("drop table if exists t1") + tk.MustExec("drop table if exists t2") + tk.MustExec("create table t1(id int)") + tk.MustExec("create table t2(id int)") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + require.True(t, tk.HasPlan("SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id", "MergeJoin")) + + tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") + + // Test bindingUsage, which indicates how many times the binding is used. 
+ metrics.BindUsageCounter.Reset() + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin")) + pb := &dto.Metric{} + err := metrics.BindUsageCounter.WithLabelValues(metrics.ScopeGlobal).Write(pb) + require.NoError(t, err) + require.Equal(t, float64(1), pb.GetCounter().GetValue()) + + // Test 'tidb_use_plan_baselines' + tk.MustExec("set @@tidb_use_plan_baselines = 0") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + tk.MustExec("set @@tidb_use_plan_baselines = 1") + + // Test 'drop global binding' + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin")) + tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + + // Test the case when global and session binding both exist + // PART1 : session binding should totally cover global binding + // use merge join as session binding here since the optimizer will choose hash join for this stmt in default + tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_HJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + tk.MustExec("create binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin")) + tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "MergeJoin")) + + // PART2 : the dropped session binding should continue to block the effect of global binding + tk.MustExec("create global binding for SELECT * from t1,t2 where t1.id = t2.id using SELECT /*+ TIDB_SMJ(t1, t2) */ * from t1,t2 where t1.id = t2.id") + tk.MustExec("drop binding for SELECT * from t1,t2 
where t1.id = t2.id") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) + tk.MustExec("drop global binding for SELECT * from t1,t2 where t1.id = t2.id") + require.True(t, tk.HasPlan("SELECT * from t1,t2 where t1.id = t2.id", "HashJoin")) +} + +func TestSessionBinding(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + for _, testSQL := range testSQLs { + utilCleanBindingEnv(tk, dom) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("drop table if exists t1") + tk.MustExec("create table t(i int, s varchar(20))") + tk.MustExec("create table t1(i int, s varchar(20))") + tk.MustExec("create index index_t on t(i,s)") + + metrics.BindTotalGauge.Reset() + metrics.BindMemoryUsage.Reset() + + _, err := tk.Exec("create session " + testSQL.createSQL) + require.NoError(t, err, "err %v", err) + + if testSQL.overlaySQL != "" { + _, err = tk.Exec("create session " + testSQL.overlaySQL) + require.NoError(t, err) + } + + pb := &dto.Metric{} + err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeSession, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, float64(1), pb.GetGauge().GetValue()) + err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeSession, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, testSQL.memoryUsage, pb.GetGauge().GetValue()) + + handle := tk.Session().Value(bindinfo.SessionBindInfoKeyType).(*bindinfo.SessionHandle) + hash := parser.DigestNormalized(testSQL.originSQL).String() + bindData := handle.GetBindRecord(hash, testSQL.originSQL, "test") + require.NotNil(t, bindData) + require.Equal(t, testSQL.originSQL, bindData.OriginalSQL) + bind := bindData.Bindings[0] + require.Equal(t, testSQL.bindSQL, bind.BindSQL) + require.Equal(t, "test", bindData.Db) + require.Equal(t, bindinfo.Enabled, bind.Status) + require.NotNil(t, bind.Charset) + require.NotNil(t, 
bind.Collation) + require.NotNil(t, bind.CreateTime) + require.NotNil(t, bind.UpdateTime) + + rs, err := tk.Exec("show global bindings") + require.NoError(t, err) + chk := rs.NewChunk(nil) + err = rs.Next(context.TODO(), chk) + require.NoError(t, err) + require.Equal(t, 0, chk.NumRows()) + + rs, err = tk.Exec("show session bindings") + require.NoError(t, err) + chk = rs.NewChunk(nil) + err = rs.Next(context.TODO(), chk) + require.NoError(t, err) + require.Equal(t, 1, chk.NumRows()) + row := chk.GetRow(0) + require.Equal(t, testSQL.originSQL, row.GetString(0)) + require.Equal(t, testSQL.bindSQL, row.GetString(1)) + require.Equal(t, "test", row.GetString(2)) + require.Equal(t, bindinfo.Enabled, row.GetString(3)) + require.NotNil(t, row.GetTime(4)) + require.NotNil(t, row.GetTime(5)) + require.NotNil(t, row.GetString(6)) + require.NotNil(t, row.GetString(7)) + + _, err = tk.Exec("drop session " + testSQL.dropSQL) + require.NoError(t, err) + bindData = handle.GetBindRecord(hash, testSQL.originSQL, "test") + require.NotNil(t, bindData) + require.Equal(t, testSQL.originSQL, bindData.OriginalSQL) + require.Len(t, bindData.Bindings, 0) + + err = metrics.BindTotalGauge.WithLabelValues(metrics.ScopeSession, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, float64(0), pb.GetGauge().GetValue()) + err = metrics.BindMemoryUsage.WithLabelValues(metrics.ScopeSession, bindinfo.Enabled).Write(pb) + require.NoError(t, err) + require.Equal(t, float64(0), pb.GetGauge().GetValue()) + } +} + +func TestBaselineDBLowerCase(t *testing.T) { + store, dom, clean := testkit.CreateMockStoreAndDomain(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("drop database if exists SPM") + tk.MustExec("create database SPM") + tk.MustExec("use SPM") + tk.MustExec("create table t(a int, b int)") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + tk.MustExec("update 
t set a = a + 1") + tk.MustExec("update t set a = a + 1") + tk.MustExec("admin capture bindings") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "update `spm` . `t` set `a` = `a` + ?", rows[0][0]) + // default_db should have lower case. + require.Equal(t, "spm", rows[0][2]) + tk.MustExec("drop global binding for update t set a = a + 1") + rows = tk.MustQuery("show global bindings").Rows() + // DROP GLOBAL BINGING should remove the binding even if we are in SPM database. + require.Len(t, rows, 0) + + tk.MustExec("create global binding for select * from t using select * from t") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `spm` . `t`", rows[0][0]) + // default_db should have lower case. + require.Equal(t, "spm", rows[0][2]) + tk.MustExec("drop global binding for select * from t") + rows = tk.MustQuery("show global bindings").Rows() + // DROP GLOBAL BINGING should remove the binding even if we are in SPM database. + require.Len(t, rows, 0) + + tk.MustExec("create session binding for select * from t using select * from t") + rows = tk.MustQuery("show session bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `spm` . `t`", rows[0][0]) + // default_db should have lower case. + require.Equal(t, "spm", rows[0][2]) + tk.MustExec("drop session binding for select * from t") + rows = tk.MustQuery("show session bindings").Rows() + // DROP SESSION BINGING should remove the binding even if we are in SPM database. + require.Len(t, rows, 0) + + utilCleanBindingEnv(tk, dom) + + // Simulate existing bindings with upper case default_db. + tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t`', 'select * from `spm` . 
`t`', 'SPM', 'enabled', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustQuery("select original_sql, default_db from mysql.bind_info where original_sql = 'select * from `spm` . `t`'").Check(testkit.Rows( + "select * from `spm` . `t` SPM", + )) + tk.MustExec("admin reload bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `spm` . `t`", rows[0][0]) + // default_db should have lower case. + require.Equal(t, "spm", rows[0][2]) + tk.MustExec("drop global binding for select * from t") + rows = tk.MustQuery("show global bindings").Rows() + // DROP GLOBAL BINGING should remove the binding even if we are in SPM database. + require.Len(t, rows, 0) + + utilCleanBindingEnv(tk, dom) + // Simulate existing bindings with upper case default_db. + tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t`', 'select * from `spm` . `t`', 'SPM', 'enabled', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustQuery("select original_sql, default_db from mysql.bind_info where original_sql = 'select * from `spm` . `t`'").Check(testkit.Rows( + "select * from `spm` . `t` SPM", + )) + tk.MustExec("admin reload bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `spm` . `t`", rows[0][0]) + // default_db should have lower case. + require.Equal(t, "spm", rows[0][2]) + tk.MustExec("create global binding for select * from t using select * from t") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "select * from `spm` . `t`", rows[0][0]) + // default_db should have lower case. + require.Equal(t, "spm", rows[0][2]) + tk.MustQuery("select original_sql, default_db, status from mysql.bind_info where original_sql = 'select * from `spm` . `t`'").Check(testkit.Rows( + "select * from `spm` . 
`t` SPM deleted", + "select * from `spm` . `t` spm enabled", + )) +} + +func TestShowGlobalBindings(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + stmtsummary.StmtSummaryByDigestMap.Clear() + tk.MustExec("drop database if exists SPM") + tk.MustExec("create database SPM") + tk.MustExec("use SPM") + tk.MustExec("create table t(a int, b int, key(a))") + tk.MustExec("create table t0(a int, b int, key(a))") + require.True(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil)) + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) + // Simulate existing bindings in the mysql.bind_info. + tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t`', 'select * from `spm` . `t` USE INDEX (`a`)', 'SPM', 'enabled', '2000-01-01 09:00:00', '2000-01-01 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t0`', 'select * from `spm` . `t0` USE INDEX (`a`)', 'SPM', 'enabled', '2000-01-02 09:00:00', '2000-01-02 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t`', 'select /*+ use_index(`t` `a`)*/ * from `spm` . `t`', 'SPM', 'enabled', '2000-01-03 09:00:00', '2000-01-03 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustExec("insert into mysql.bind_info values('select * from `spm` . `t0`', 'select /*+ use_index(`t0` `a`)*/ * from `spm` . `t0`', 'SPM', 'enabled', '2000-01-04 09:00:00', '2000-01-04 09:00:00', '', '','" + + bindinfo.Manual + "')") + tk.MustExec("admin reload bindings") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 4) + require.Equal(t, "select * from `spm` . `t0`", rows[0][0]) + require.Equal(t, "2000-01-04 09:00:00.000", rows[0][5]) + require.Equal(t, "select * from `spm` . 
`t0`", rows[1][0]) + require.Equal(t, "2000-01-02 09:00:00.000", rows[1][5]) + require.Equal(t, "select * from `spm` . `t`", rows[2][0]) + require.Equal(t, "2000-01-03 09:00:00.000", rows[2][5]) + require.Equal(t, "select * from `spm` . `t`", rows[3][0]) + require.Equal(t, "2000-01-01 09:00:00.000", rows[3][5]) + + rows = tk.MustQuery("show session bindings").Rows() + require.Len(t, rows, 0) + tk.MustExec("create session binding for select a from t using select a from t") + tk.MustExec("create session binding for select a from t0 using select a from t0") + tk.MustExec("create session binding for select b from t using select b from t") + tk.MustExec("create session binding for select b from t0 using select b from t0") + rows = tk.MustQuery("show session bindings").Rows() + require.Len(t, rows, 4) + require.Equal(t, "select `b` from `spm` . `t0`", rows[0][0]) + require.Equal(t, "select `b` from `spm` . `t`", rows[1][0]) + require.Equal(t, "select `a` from `spm` . `t0`", rows[2][0]) + require.Equal(t, "select `a` from `spm` . 
`t`", rows[3][0]) +} + +func TestDuplicateBindings(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec("create global binding for select * from t using select * from t use index(idx);") + rows := tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + createTime := rows[0][4] + time.Sleep(time.Millisecond) + tk.MustExec("create global binding for select * from t using select * from t use index(idx);") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.False(t, createTime == rows[0][4]) + + tk.MustExec("create session binding for select * from t using select * from t use index(idx);") + rows = tk.MustQuery("show session bindings").Rows() + require.Len(t, rows, 1) + createTime = rows[0][4] + time.Sleep(time.Millisecond) + tk.MustExec("create session binding for select * from t using select * from t use index(idx);") + rows = tk.MustQuery("show session bindings").Rows() + require.Len(t, rows, 1) + require.False(t, createTime == rows[0][4]) +} + +func TestDefaultDB(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec("create global binding for select * from test.t using select * from test.t use index(idx)") + tk.MustExec("use mysql") + tk.MustQuery("select * from test.t") + // Even in another database, we could still use the bindings. 
+ require.Equal(t, "t:idx", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("drop global binding for select * from test.t") + tk.MustQuery("show global bindings").Check(testkit.Rows()) + + tk.MustExec("use test") + tk.MustExec("create session binding for select * from test.t using select * from test.t use index(idx)") + tk.MustExec("use mysql") + tk.MustQuery("select * from test.t") + // Even in another database, we could still use the bindings. + require.Equal(t, "t:idx", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("drop session binding for select * from test.t") + tk.MustQuery("show session bindings").Check(testkit.Rows()) +} + +type mockSessionManager struct { + PS []*util.ProcessInfo +} + +func (msm *mockSessionManager) ShowTxnList() []*txninfo.TxnInfo { + panic("unimplemented!") +} + +func (msm *mockSessionManager) ShowProcessList() map[uint64]*util.ProcessInfo { + ret := make(map[uint64]*util.ProcessInfo) + for _, item := range msm.PS { + ret[item.ID] = item + } + return ret +} + +func (msm *mockSessionManager) GetProcessInfo(id uint64) (*util.ProcessInfo, bool) { + for _, item := range msm.PS { + if item.ID == id { + return item, true + } + } + return &util.ProcessInfo{}, false +} + +func (msm *mockSessionManager) Kill(cid uint64, query bool) { +} + +func (msm *mockSessionManager) KillAllConnections() { +} + +func (msm *mockSessionManager) UpdateTLSConfig(cfg *tls.Config) { +} + +func (msm *mockSessionManager) ServerID() uint64 { + return 1 +} + +func TestIssue19836(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, key (a));") + tk.MustExec("CREATE SESSION BINDING FOR select * from t where a = 1 limit 5, 5 USING select * from t ignore index (a) where a = 1 limit 5, 5;") + tk.MustExec("PREPARE stmt FROM 'select * from t where a = 40 limit ?, ?';") + 
tk.MustExec("set @a=1;") + tk.MustExec("set @b=2;") + tk.MustExec("EXECUTE stmt USING @a, @b;") + tk.Session().SetSessionManager(&mockSessionManager{ + PS: []*util.ProcessInfo{tk.Session().ShowProcess()}, + }) + explainResult := testkit.Rows( + "Limit_8 2.00 0 root time:0s, loops:0 offset:1, count:2 N/A N/A", + "└─TableReader_13 3.00 0 root time:0s, loops:0 data:Limit_12 N/A N/A", + " └─Limit_12 3.00 0 cop[tikv] offset:0, count:3 N/A N/A", + " └─Selection_11 3.00 0 cop[tikv] eq(test.t.a, 40) N/A N/A", + " └─TableFullScan_10 3000.00 0 cop[tikv] table:t keep order:false, stats:pseudo N/A N/A", + ) + tk.MustQuery("explain for connection " + strconv.FormatUint(tk.Session().ShowProcess().ID, 10)).Check(explainResult) +} + +func TestTemporaryTable(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create global temporary table t(a int, b int, key(a), key(b)) on commit delete rows") + tk.MustExec("create table t2(a int, b int, key(a), key(b))") + tk.MustGetErrCode("create session binding for select * from t where b = 123 using select * from t ignore index(b) where b = 123;", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for insert into t select * from t2 where t2.b = 1 and t2.c > 1 using insert into t select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for replace into t select * from t2 where t2.b = 1 and t2.c > 1 using replace into t select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for update t set a = 1 where b = 1 and c > 1 using update /*+ use_index(t, c) */ t set a = 1 where b = 1 and c > 1", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for delete from t where b = 1 and c > 1 using delete /*+ use_index(t, c) */ 
from t where b = 1 and c > 1", errno.ErrOptOnTemporaryTable) +} + +func TestLocalTemporaryTable(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists tmp2") + tk.MustExec("create temporary table tmp2 (a int, b int, key(a), key(b));") + tk.MustGetErrCode("create session binding for select * from tmp2 where b = 123 using select * from t ignore index(b) where b = 123;", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for insert into tmp2 select * from t2 where t2.b = 1 and t2.c > 1 using insert into t select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for replace into tmp2 select * from t2 where t2.b = 1 and t2.c > 1 using replace into t select /*+ use_index(t2,c) */ * from t2 where t2.b = 1 and t2.c > 1", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for update tmp2 set a = 1 where b = 1 and c > 1 using update /*+ use_index(t, c) */ t set a = 1 where b = 1 and c > 1", errno.ErrOptOnTemporaryTable) + tk.MustGetErrCode("create binding for delete from tmp2 where b = 1 and c > 1 using delete /*+ use_index(t, c) */ from t where b = 1 and c > 1", errno.ErrOptOnTemporaryTable) +} + +func TestDropSingleBindings(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b))") + + // Test drop session bindings. + tk.MustExec("create binding for select * from t using select * from t use index(idx_a)") + tk.MustExec("create binding for select * from t using select * from t use index(idx_b)") + rows := tk.MustQuery("show bindings").Rows() + // The size of bindings is equal to one. 
Because for one normalized sql, + // the `create binding` clears all the origin bindings. + require.Len(t, rows, 1) + require.Equal(t, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)", rows[0][1]) + tk.MustExec("drop binding for select * from t using select * from t use index(idx_a)") + rows = tk.MustQuery("show bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)", rows[0][1]) + tk.MustExec("drop table t") + tk.MustExec("drop binding for select * from t using select * from t use index(idx_b)") + rows = tk.MustQuery("show bindings").Rows() + require.Len(t, rows, 0) + + tk.MustExec("create table t(a int, b int, c int, index idx_a(a), index idx_b(b))") + // Test drop global bindings. + tk.MustExec("create global binding for select * from t using select * from t use index(idx_a)") + tk.MustExec("create global binding for select * from t using select * from t use index(idx_b)") + rows = tk.MustQuery("show global bindings").Rows() + // The size of bindings is equal to one. Because for one normalized sql, + // the `create binding` clears all the origin bindings. 
+ require.Len(t, rows, 1) + require.Equal(t, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)", rows[0][1]) + tk.MustExec("drop global binding for select * from t using select * from t use index(idx_a)") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 1) + require.Equal(t, "SELECT * FROM `test`.`t` USE INDEX (`idx_b`)", rows[0][1]) + tk.MustExec("drop table t") + tk.MustExec("drop global binding for select * from t using select * from t use index(idx_b)") + rows = tk.MustQuery("show global bindings").Rows() + require.Len(t, rows, 0) +} + +func TestPreparedStmt(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + + orgEnable := plannercore.PreparedPlanCacheEnabled() + defer func() { + plannercore.SetPreparedPlanCache(orgEnable) + }() + plannercore.SetPreparedPlanCache(false) // requires plan cache disabled, or the IndexNames = 1 on first test. + + tk.MustExec("use test") + tk.MustExec("drop table if exists t") + tk.MustExec("create table t(a int, b int, index idx(a))") + tk.MustExec(`prepare stmt1 from 'select * from t'`) + tk.MustExec("execute stmt1") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 0) + + tk.MustExec("create binding for select * from t using select * from t use index(idx)") + tk.MustExec("execute stmt1") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + + tk.MustExec("drop binding for select * from t") + tk.MustExec("execute stmt1") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 0) + + tk.MustExec("drop table t") + tk.MustExec("create table t(a int, b int, c int, index idx_b(b), index idx_c(c))") + tk.MustExec("set @p = 1") + + tk.MustExec("prepare stmt from 'delete from t where b = ? 
and c > ?'") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("create binding for delete from t where b = 2 and c > 2 using delete /*+ use_index(t,idx_c) */ from t where b = 2 and c > 2") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + + tk.MustExec("prepare stmt from 'update t set a = 1 where b = ? and c > ?'") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("create binding for update t set a = 2 where b = 2 and c > 2 using update /*+ use_index(t,idx_c) */ t set a = 2 where b = 2 and c > 2") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + + tk.MustExec("drop table if exists t1") + tk.MustExec("create table t1 like t") + tk.MustExec("prepare stmt from 'insert into t1 select * from t where t.b = ? and t.c > ?'") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("create binding for insert into t1 select * from t where t.b = 2 and t.c > 2 using insert into t1 select /*+ use_index(t,idx_c) */ * from t where t.b = 2 and t.c > 2") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + + tk.MustExec("prepare stmt from 'replace into t1 select * from t where t.b = ? 
and t.c > ?'") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_b", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) + tk.MustExec("create binding for replace into t1 select * from t where t.b = 2 and t.c > 2 using replace into t1 select /*+ use_index(t,idx_c) */ * from t where t.b = 2 and t.c > 2") + tk.MustExec("execute stmt using @p,@p") + require.Len(t, tk.Session().GetSessionVars().StmtCtx.IndexNames, 1) + require.Equal(t, "t:idx_c", tk.Session().GetSessionVars().StmtCtx.IndexNames[0]) +} diff --git a/bindinfo/stat.go b/bindinfo/stat.go new file mode 100644 index 0000000000000..f9c2e681ecb1e --- /dev/null +++ b/bindinfo/stat.go @@ -0,0 +1,40 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bindinfo + +import ( + "github.com/pingcap/tidb/sessionctx/variable" +) + +var ( + lastPlanBindingUpdateTime = "last_plan_binding_update_time" +) + +// GetScope gets the status variables scope. +func (h *BindHandle) GetScope(status string) variable.ScopeFlag { + return variable.ScopeSession +} + +// Stats returns the server statistics. 
+func (h *BindHandle) Stats(vars *variable.SessionVars) (map[string]interface{}, error) { + h.bindInfo.Lock() + defer func() { + h.bindInfo.Unlock() + }() + m := make(map[string]interface{}) + m[lastPlanBindingUpdateTime] = h.bindInfo.lastUpdateTime.String() + + return m, nil +} diff --git a/bindinfo/temptable_test.go b/bindinfo/temptable_test.go new file mode 100644 index 0000000000000..e92314df28d81 --- /dev/null +++ b/bindinfo/temptable_test.go @@ -0,0 +1,92 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bindinfo_test + +import ( + "strings" + "testing" + + "github.com/pingcap/tidb/errno" + "github.com/pingcap/tidb/testkit" +) + +// TestSelectBindingOnGlobalTempTableProhibited covers https://github.com/pingcap/tidb/issues/26377 +func TestSelectBindingOnGlobalTempTableProhibited(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t1,tmp1") + tk.MustExec("create table t1(a int(11))") + tk.MustExec("create global temporary table tmp1(a int(11), key idx_a(a)) on commit delete rows;") + tk.MustExec("create temporary table tmp2(a int(11), key idx_a(a));") + + queries := []string{ + "create global binding for with cte1 as (select a from tmp1) select * from cte1 using with cte1 as (select a from tmp1) select * from cte1", + "create global binding for select * from t1 inner join tmp1 on t1.a=tmp1.a using select * from t1 inner join tmp1 on t1.a=tmp1.a;", + "create global binding for select * from t1 where t1.a in (select a from tmp1) using select * from t1 where t1.a in (select a from tmp1 use index (idx_a));", + "create global binding for select a from t1 union select a from tmp1 using select a from t1 union select a from tmp1 use index (idx_a);", + "create global binding for select t1.a, (select a from tmp1 where tmp1.a=1) as t2 from t1 using select t1.a, (select a from tmp1 where tmp1.a=1) as t2 from t1;", + "create global binding for select * from (select * from tmp1) using select * from (select * from tmp1);", + "create global binding for select * from t1 where t1.a = (select a from tmp1) using select * from t1 where t1.a = (select a from tmp1)", + } + genLocalTemporarySQL := func(sql string) string { + return strings.Replace(sql, "tmp1", "tmp2", -1) + } + for _, query := range queries { + localSQL := genLocalTemporarySQL(query) + queries = append(queries, localSQL) + } + + for _, q := range queries { + tk.MustGetErrCode(q, 
errno.ErrOptOnTemporaryTable) + } +} + +// TestDMLBindingOnGlobalTempTableProhibited covers https://github.com/pingcap/tidb/issues/27422 +func TestDMLBindingOnGlobalTempTableProhibited(t *testing.T) { + store, clean := testkit.CreateMockStore(t) + defer clean() + + tk := testkit.NewTestKit(t, store) + tk.MustExec("use test") + tk.MustExec("drop table if exists t1,tmp1,tmp2") + tk.MustExec("create table t1(a int(11))") + tk.MustExec("create global temporary table tmp1(a int(11), key idx_a(a)) on commit delete rows;") + tk.MustExec("create temporary table tmp2(a int(11), key idx_a(a));") + + queries := []string{ + "create global binding for insert into t1 (select * from tmp1) using insert into t1 (select * from tmp1);", + "create global binding for update t1 inner join tmp1 on t1.a=tmp1.a set t1.a=1 using update t1 inner join tmp1 on t1.a=tmp1.a set t1.a=1", + "create global binding for update t1 set t1.a=(select a from tmp1) using update t1 set t1.a=(select a from tmp1)", + "create global binding for update t1 set t1.a=1 where t1.a = (select a from tmp1) using update t1 set t1.a=1 where t1.a = (select a from tmp1)", + "create global binding for with cte1 as (select a from tmp1) update t1 set t1.a=1 where t1.a in (select a from cte1) using with cte1 as (select a from tmp1) update t1 set t1.a=1 where t1.a in (select a from cte1)", + "create global binding for delete from t1 where t1.a in (select a from tmp1) using delete from t1 where t1.a in (select a from tmp1)", + "create global binding for delete from t1 where t1.a = (select a from tmp1) using delete from t1 where t1.a = (select a from tmp1)", + "create global binding for delete t1 from t1,tmp1 using delete t1 from t1,tmp1", + } + genLocalTemporarySQL := func(sql string) string { + return strings.Replace(sql, "tmp1", "tmp2", -1) + } + for _, query := range queries { + localSQL := genLocalTemporarySQL(query) + queries = append(queries, localSQL) + } + + for _, q := range queries { + tk.MustGetErrCode(q, 
errno.ErrOptOnTemporaryTable) + } +} diff --git a/br/.codecov.yml b/br/.codecov.yml new file mode 100644 index 0000000000000..9fc8064faee5c --- /dev/null +++ b/br/.codecov.yml @@ -0,0 +1,22 @@ +codecov: + require_ci_to_pass: yes + +comment: + layout: "reach, diff, flags, files" + behavior: default + require_changes: false # if true: only post the comment if coverage changes + require_base: no # [yes :: must have a base report to post] + require_head: yes # [yes :: must have a head report to post] + branches: # branch names that can post comment + - "master" + +coverage: + status: + project: + default: + # Allow the coverage to drop by 3% + target: 85% + threshold: 3% + branches: + - master + patch: off diff --git a/br/.dockerignore b/br/.dockerignore new file mode 120000 index 0000000000000..3e4e48b0b5fe6 --- /dev/null +++ b/br/.dockerignore @@ -0,0 +1 @@ +.gitignore \ No newline at end of file diff --git a/br/.editorconfig b/br/.editorconfig new file mode 100644 index 0000000000000..43c6a002cce45 --- /dev/null +++ b/br/.editorconfig @@ -0,0 +1,10 @@ +[*] +end_of_line = lf +insert_final_newline = true +charset = utf-8 + +# tab_size = 4 spaces +[*.go] +indent_style = tab +indent_size = 4 +trim_trailing_whitespace = true diff --git a/br/.gitattributes b/br/.gitattributes new file mode 100644 index 0000000000000..ba35fa1000ef6 --- /dev/null +++ b/br/.gitattributes @@ -0,0 +1 @@ +*_generated.go linguist-generated=true diff --git a/br/.gitignore b/br/.gitignore new file mode 100644 index 0000000000000..c29d04732ce40 --- /dev/null +++ b/br/.gitignore @@ -0,0 +1,14 @@ +/br +/bin +/.idea +/docker/data/ +/docker/logs/ +*.swp +.DS_Store +/go.mod +/go.sum + +# for the web interface +web/node_modules/ +web/dist/ +.vscode/ diff --git a/br/.golangci.yml b/br/.golangci.yml new file mode 100644 index 0000000000000..0cb2a9b651251 --- /dev/null +++ b/br/.golangci.yml @@ -0,0 +1,12 @@ +linters-settings: + gocyclo: + min-complexity: 40 + +issues: + # Excluding configuration per-path, 
per-linter, per-text and per-source + exclude-rules: + # TODO Remove it. + - path: split_client.go + text: "SA1019:" + linters: + - staticcheck diff --git a/br/COMPATIBILITY_TEST.md b/br/COMPATIBILITY_TEST.md new file mode 100644 index 0000000000000..b5580835baee8 --- /dev/null +++ b/br/COMPATIBILITY_TEST.md @@ -0,0 +1,42 @@ +# Compatibility test + +## Background + +We had some incompatibility issues in the past, which made BR unable to restore backed up data in some situations. +So we need a test workflow to check the compatibility. + +## Goal + +- Ensure backward compatibility for restoring data from the previous 3 minor versions + +## Workflow + +### Data Preparation + +This workflow needs previous backup data. To get this data, we perform the following steps: + +- Run a TiDB cluster with the previous version. +- Run backup jobs with the corresponding BR version, with different storages (s3, gcs). + +Given we test for the previous 3 versions, and there are 2 different storage systems, we will produce 6 backup archives for 6 separate compatibility tests. + +### Test Content + +- Start TiDB cluster with nightly version. + +- Build BR binary with current directory. + +- Use BR to restore different version backup data one by one. + +- Make sure restore data is expected. + +### Running tests + +Start a cluster with docker-compose and build BR with the latest version.
+ +```sh +docker-compose -f docker-compose.yaml rm -s -v && \ +docker-compose -f docker-compose.yaml build && \ +docker-compose -f docker-compose.yaml up --remove-orphans +``` + +```sh +docker-compose -f docker-compose.yaml control make compatibility_test +``` diff --git a/br/CONTRIBUTING.md b/br/CONTRIBUTING.md new file mode 100644 index 0000000000000..1f2846471a7a1 --- /dev/null +++ b/br/CONTRIBUTING.md @@ -0,0 +1,90 @@ +# How to contribute + +This document outlines some of the conventions on development workflow, commit +message formatting, contact points and other resources to make it easier to get +your contribution accepted. + +## Getting started + +- Fork the repository on GitHub. +- Read the README.md for build instructions. +- Play with the project, submit bugs, submit patches! + +## Building BR + +Developing BR requires: + +* [Go 1.16+](http://golang.org/doc/code.html) +* An internet connection to download the dependencies + +Simply run `make` to build the program. + +```sh +make +``` + +### Running tests + +This project contains unit tests and integration tests with coverage collection. +See [tests/README.md](./tests/README.md) for how to execute and add tests. + +### Updating dependencies + +BR uses [Go 1.11 module](https://github.com/golang/go/wiki/Modules) to manage dependencies. +To add or update a dependency: use the `go mod edit` command to change the dependency. + +## Contribution flow + +This is a rough outline of what a contributor's workflow looks like: + +- Create a topic branch from where you want to base your work. This is usually `master`. +- Make commits of logical units and add test case if the change fixes a bug or adds new functionality. +- Run tests and make sure all the tests are passed. +- Make sure your commit messages are in the proper format (see below). +- Push your changes to a topic branch in your fork of the repository. +- Submit a pull request. +- Your PR must receive LGTMs from two maintainers. 
+ +Thanks for your contributions! + +### Code style + +The coding style suggested by the Golang community is used in BR. +See the [style doc](https://github.com/golang/go/wiki/CodeReviewComments) for details. + +Please follow this style to make BR easy to review, maintain and develop. + +### Format of the Commit Message + +We follow a rough convention for commit messages that is designed to answer two +questions: what changed and why. The subject line should feature the what and +the body of the commit should describe the why. + +``` +restore: add comment for variable declaration + +Improve documentation. +``` + +The format can be described more formally as follows: + +``` +: + + + +