diff --git a/Makefile b/Makefile
index b6b3cab07a462..9c92292dfe714 100644
--- a/Makefile
+++ b/Makefile
@@ -421,6 +421,7 @@ dumpling_tidy:
 
 dumpling_bins:
 	@which bin/tidb-server
 	@which bin/minio
+	@which bin/mc
 	@which bin/tidb-lightning
 	@which bin/sync_diff_inspector
diff --git a/br/pkg/lightning/lightning.go b/br/pkg/lightning/lightning.go
index 3c6f0256a740c..dc389bebbc6ab 100644
--- a/br/pkg/lightning/lightning.go
+++ b/br/pkg/lightning/lightning.go
@@ -615,7 +615,8 @@ func (l *Lightning) handlePostTask(w http.ResponseWriter, req *http.Request) {
 		writeJSONError(w, http.StatusBadRequest, "cannot read request", err)
 		return
 	}
-	log.L().Info("received task config", zap.ByteString("content", data))
+	filteredData := utils.HideSensitive(string(data))
+	log.L().Info("received task config", zap.String("content", filteredData))
 
 	cfg := config.NewConfig()
 	if err = cfg.LoadFromGlobal(l.globalCfg); err != nil {
diff --git a/br/pkg/utils/sensitive.go b/br/pkg/utils/sensitive.go
new file mode 100644
index 0000000000000..fcc31ee30b78d
--- /dev/null
+++ b/br/pkg/utils/sensitive.go
@@ -0,0 +1,23 @@
+// Copyright 2022 PingCAP, Inc. Licensed under Apache-2.0.
+
+package utils
+
+import (
+	"regexp"
+)
+
+var (
+	passwordPatterns = `(password[\s]*=[\s]*(\\")?)(.*?)((\\")?\\n)`
+
+	passwordRegexp *regexp.Regexp
+)
+
+func init() {
+	passwordRegexp = regexp.MustCompile(passwordPatterns)
+}
+
+// HideSensitive replace password with ******.
+func HideSensitive(input string) string {
+	output := passwordRegexp.ReplaceAllString(input, "$1******$4")
+	return output
+}
diff --git a/br/pkg/utils/sensitive_test.go b/br/pkg/utils/sensitive_test.go
new file mode 100644
index 0000000000000..a14ce0619eb85
--- /dev/null
+++ b/br/pkg/utils/sensitive_test.go
@@ -0,0 +1,41 @@
+// Copyright 2022 PingCAP, Inc. Licensed under Apache-2.0.
+
+package utils
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestHideSensitive(t *testing.T) {
+	strs := []struct {
+		old string
+		new string
+	}{
+		{
+			`host = "127.0.0.1"\n user = "root"\n password = "/Q7B9DizNLLTTfiZHv9WoEAKamfpIUs="\n port = 3306\n`,
+			`host = "127.0.0.1"\n user = "root"\n password = ******\n port = 3306\n`,
+		},
+		{
+			`host = "127.0.0.1"\n user = "root"\n password = ""\n port = 3306\n`,
+			`host = "127.0.0.1"\n user = "root"\n password = ******\n port = 3306\n`,
+		},
+		{
+			`host = "127.0.0.1"\n user = "root"\n password= "/Q7B9DizNLLTTfiZHv9WoEAKamfpIUs="\n port = 3306\n`,
+			`host = "127.0.0.1"\n user = "root"\n password= ******\n port = 3306\n`,
+		},
+		{
+			`host = "127.0.0.1"\n user = "root"\n password =""\n port = 3306\n`,
+			`host = "127.0.0.1"\n user = "root"\n password =******\n port = 3306\n`,
+		},
+		{
+			`host = "127.0.0.1"\n user = "root"\n password=""\n port = 3306\n`,
+			`host = "127.0.0.1"\n user = "root"\n password=******\n port = 3306\n`,
+		},
+	}
+	for i, str := range strs {
+		t.Logf("case #%d\n", i)
+		require.Equal(t, str.new, HideSensitive(str.old))
+	}
+}
diff --git a/dumpling/install.sh b/dumpling/install.sh
index e51ee861b099c..65fd56e454c97 100644
--- a/dumpling/install.sh
+++ b/dumpling/install.sh
@@ -27,3 +27,6 @@ mv tidb/bin/tidb-server bin/
 # download minio
 wget https://dl.min.io/server/minio/release/linux-amd64/minio -O bin/minio
 chmod a+x bin/minio
+
+wget https://dl.min.io/client/mc/release/linux-amd64/mc -O bin/mc
+chmod a+x bin/mc
diff --git a/dumpling/tests/s3/run.sh b/dumpling/tests/s3/run.sh
index a5011e4707946..ede8c316106ed 100755
--- a/dumpling/tests/s3/run.sh
+++ b/dumpling/tests/s3/run.sh
@@ -47,38 +47,54 @@ run_sql "create database $DB_NAME DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;"
 (cd "$(dirname "$0")" && GO111MODULE=on go build -o out)
 $DUMPLING_BASE_NAME/out -B $DB_NAME -T $TABLE_NAME -P 3306 -w 16
 
-# run dumpling!
HOST_DIR=${DUMPLING_OUTPUT_DIR} + +# run dumpling local +export DUMPLING_OUTPUT_DIR=${HOST_DIR}/local +run_dumpling + +# run dumpling s3 export DUMPLING_OUTPUT_DIR=s3://mybucket/dump export DUMPLING_TEST_DATABASE=$DB_NAME export AWS_REGION=us-east-1 export AWS_ACCESS_KEY_ID="$MINIO_ACCESS_KEY" export AWS_SECRET_ACCESS_KEY="$MINIO_SECRET_KEY" run_dumpling --s3.endpoint="http://$S3_ENDPOINT/" -ls "${HOST_DIR}" +ls "${HOST_DIR}" -R + +mkdir -p "${HOST_DIR}/remote" + +bin/mc config host add minio http://127.0.0.1:5000 testid testkey8 +bin/mc cp minio/mybucket/dump/s3-schema-create.sql "${HOST_DIR}/remote/s3-schema-create.sql" +bin/mc cp minio/mybucket/dump/s3.t-schema.sql "${HOST_DIR}/remote/s3.t-schema.sql" +bin/mc cp minio/mybucket/dump/s3.t.000000000.sql "${HOST_DIR}/remote/s3.t.000000000.sql" -file_should_exist "$DBPATH/mybucket/dump/s3-schema-create.sql" -file_should_exist "$DBPATH/mybucket/dump/s3.t-schema.sql" -file_should_exist "$DBPATH/mybucket/dump/s3.t.000000000.sql" +diff "${HOST_DIR}/local/s3-schema-create.sql" "${HOST_DIR}/remote/s3-schema-create.sql" +diff "${HOST_DIR}/local/s3.t-schema.sql" "${HOST_DIR}/remote/s3.t-schema.sql" +diff "${HOST_DIR}/local/s3.t.000000000.sql" "${HOST_DIR}/remote/s3.t.000000000.sql" -cnt=`grep -o "('aaaaaaaaaa')" $DBPATH/mybucket/dump/s3.t.000000000.sql|wc -l` +cnt=`grep -o "('aaaaaaaaaa')" ${HOST_DIR}/remote/s3.t.000000000.sql|wc -l` echo "1st records count is ${cnt}" [ $cnt = 5000000 ] # run dumpling with compress option -mv "$DBPATH/mybucket/dump" "$DBPATH/mybucket/expect" +export DUMPLING_OUTPUT_DIR=s3://mybucket/dump-compress + run_dumpling --s3.endpoint="http://$S3_ENDPOINT/" --compress "gzip" -file_should_exist "$DBPATH/mybucket/dump/s3-schema-create.sql.gz" -file_should_exist "$DBPATH/mybucket/dump/s3.t-schema.sql.gz" -file_should_exist "$DBPATH/mybucket/dump/s3.t.000000000.sql.gz" -gzip "$DBPATH/mybucket/dump/s3-schema-create.sql.gz" -d -diff "$DBPATH/mybucket/expect/s3-schema-create.sql" 
"$DBPATH/mybucket/dump/s3-schema-create.sql" +mkdir -p "${HOST_DIR}/compress" + +bin/mc cp minio/mybucket/dump-compress/s3-schema-create.sql.gz "${HOST_DIR}/compress/s3-schema-create.sql.gz" +bin/mc cp minio/mybucket/dump-compress/s3.t-schema.sql.gz "${HOST_DIR}/compress/s3.t-schema.sql.gz" +bin/mc cp minio/mybucket/dump-compress/s3.t.000000000.sql.gz "${HOST_DIR}/compress/s3.t.000000000.sql.gz" + +gzip "${HOST_DIR}/compress/s3-schema-create.sql.gz" -d +diff "${HOST_DIR}/local/s3-schema-create.sql" "${HOST_DIR}/compress/s3-schema-create.sql" -gzip "$DBPATH/mybucket/dump/s3.t-schema.sql.gz" -d -diff "$DBPATH/mybucket/expect/s3.t-schema.sql" "$DBPATH/mybucket/dump/s3.t-schema.sql" +gzip "${HOST_DIR}/compress/s3.t-schema.sql.gz" -d +diff "${HOST_DIR}/local/s3.t-schema.sql" "${HOST_DIR}/compress/s3.t-schema.sql" -gzip "$DBPATH/mybucket/dump/s3.t.000000000.sql.gz" -d -diff "$DBPATH/mybucket/expect/s3.t.000000000.sql" "$DBPATH/mybucket/dump/s3.t.000000000.sql" +gzip "${HOST_DIR}/compress/s3.t.000000000.sql.gz" -d +diff "${HOST_DIR}/local/s3.t.000000000.sql" "${HOST_DIR}/compress/s3.t.000000000.sql" run_sql "drop database if exists \`$DB_NAME\`;"