From 0f31b65d7984a0157de634d31bbc66b0f29c8f97 Mon Sep 17 00:00:00 2001 From: Vee Zhang Date: Thu, 2 Mar 2023 13:50:38 +0800 Subject: [PATCH] refactored --- .dockerignore | 4 - .editorconfig | 12 - .github/workflows/build.yml | 50 + .github/workflows/package.yaml | 67 - .github/workflows/pull_request.yaml | 24 - .github/workflows/release.yaml | 72 - .gitignore | 33 +- .golangci.yml | 246 +++ .goreleaser.yaml | 115 ++ Dockerfile | 29 +- Makefile | 91 +- README.md | 412 +--- README_zh-CN.md | 382 ---- ci/bootstrap.sh | 47 - cmd/importer.go | 84 - cmd/nebula-importer/main.go | 13 + docker-compose.yaml | 139 -- docs/configuration-reference.md | 56 - docs/golang-install-en.md | 39 - docs/golang-install.md | 39 - examples/basic/README.md | 0 examples/basic/basic.int.v3.yaml | 150 ++ examples/basic/basic.string.v3.yaml | 99 + examples/basic/knows.csv | 9 + examples/basic/person.csv | 13 + examples/hdfs/hdfs.v3.yaml | 50 + examples/{sf => ldbc-snb}/.gitignore | 2 +- examples/{sf => ldbc-snb}/README.md | 0 examples/ldbc-snb/ldbc-snb.v3.yaml | 578 ++++++ examples/s3/s3.v3.yaml | 70 + examples/sf/sf.yaml | 873 -------- examples/v1/choose-hex.csv | 6 - examples/v1/choose.csv | 7 - examples/v1/course-with-header.csv | 5 - examples/v1/course.csv | 2 - examples/v1/example.yaml | 498 ----- examples/v1/follow-delimiter.csv | 4 - examples/v1/follow-with-header.csv | 4 - examples/v1/follow-with-label-and-str-vid.csv | 5 - examples/v1/follow-with-label.csv | 5 - examples/v1/follow.csv | 4 - .../v1/student-with-label-and-str-vid.csv | 4 - examples/v1/student.csv | 3 - examples/v2/basic_type_test.csv | 12 - examples/v2/choose.csv | 4 - examples/v2/course-lazy-quotes.csv | 7 - examples/v2/course-with-header.csv | 5 - examples/v2/course.csv | 7 - examples/v2/data/course.csv | 7 - examples/v2/date_test.csv | 3 - examples/v2/example.yaml | 744 ------- examples/v2/example_with_working_dir.yaml | 44 - examples/v2/follow-delimiter.csv | 4 - examples/v2/follow-with-header.csv | 4 - examples/v2/follow-with-label-and-str-vid.csv | 5 - examples/v2/follow-with-label.csv | 5 - examples/v2/follow.csv | 4 - examples/v2/geography_test.csv | 4 - examples/v2/glob-follow-1.csv | 3 - examples/v2/glob-follow-2.csv | 1 - .../v2/student-with-label-and-str-vid.csv | 4 - examples/v2/student.csv | 3 - go.mod | 58 +- go.sum | 564 +++++- pkg/base/stats.go | 55 - pkg/base/stoppable.go | 5 - pkg/base/tools.go | 66 - pkg/base/tools_test.go | 39 - pkg/base/types.go | 96 - pkg/bytebufferpool/bytebufferpool.go | 69 + .../bytebufferpool_suite_test.go | 13 + pkg/bytebufferpool/bytebufferpool_test.go | 63 + pkg/client/client.go | 138 ++ pkg/client/client_mock.go | 77 + pkg/client/client_suite_test.go | 13 + pkg/client/client_test.go | 241 +++ pkg/client/clientmgr.go | 48 - pkg/client/clientpool.go | 262 --- pkg/client/errors.go | 5 + pkg/client/logger.go | 27 + pkg/client/logger_test.go | 17 + pkg/client/option.go | 180 ++ pkg/client/option_test.go | 131 ++ pkg/client/pool.go | 210 ++ pkg/client/pool_mock.go | 111 ++ pkg/client/pool_test.go | 383 ++++ pkg/client/response.go | 15 + pkg/client/response_mock.go | 119 ++ pkg/client/response_v3.go | 55 + pkg/client/response_v3_test.go | 78 + pkg/client/session.go | 8 + pkg/client/session_mock.go | 77 + pkg/client/session_v3.go | 75 + pkg/client/session_v3_test.go | 79 + pkg/cmd/common/io.go | 9 + pkg/cmd/nebula-importer.go | 124 ++ pkg/cmd/nebula-importer_suite_test.go | 13 + pkg/cmd/nebula-importer_test.go | 147 ++ pkg/cmd/runner.go | 143 -- pkg/cmd/testdata/build-failed.yaml | 51 + 
pkg/cmd/testdata/edge1.csv | 1 + pkg/cmd/testdata/nebula-importer.v3.yaml | 143 ++ pkg/cmd/testdata/node1.csv | 1 + pkg/cmd/testdata/optimize-failed.yaml | 35 + pkg/cmd/util/cmd.go | 13 + pkg/cmd/util/cmd_test.go | 25 + pkg/cmd/util/error.go | 31 + pkg/cmd/util/error_test.go | 37 + pkg/cmd/util/util.go | 1 + pkg/cmd/util/util_suite_test.go | 13 + pkg/config/base/client.go | 51 + pkg/config/base/client_test.go | 92 + pkg/config/base/config.go | 15 + pkg/config/base/config_suite_test.go | 13 + pkg/config/base/log.go | 49 + pkg/config/base/log_test.go | 93 + pkg/config/base/manager.go | 17 + pkg/config/base/source.go | 35 + pkg/config/base/source_test.go | 75 + pkg/config/config.go | 1001 +--------- pkg/config/config_suite_test.go | 13 + pkg/config/config_test.go | 1282 +----------- pkg/config/testdata/nebula-importer.v3.yaml | 143 ++ .../testdata/test-parse-after-period.yaml | 31 - .../testdata/test-parse-concat-items.yaml | 31 - pkg/config/testdata/test-parse-log-path.yaml | 29 - pkg/config/testdata/test-parse-no-files.yaml | 12 - pkg/config/testdata/test-parse-version.yaml | 28 - pkg/config/v3/config.go | 112 ++ pkg/config/v3/config_suite_test.go | 13 + pkg/config/v3/config_test.go | 146 ++ pkg/config/v3/manager.go | 56 + pkg/config/v3/manager_test.go | 60 + pkg/config/v3/source.go | 103 + pkg/config/v3/source_test.go | 287 +++ pkg/config/v3/testdata/file10 | 0 pkg/config/v3/testdata/file11 | 0 pkg/config/v3/testdata/file20 | 0 pkg/csv/csv_test.go | 28 - pkg/csv/errwriter.go | 60 - pkg/csv/reader.go | 105 - pkg/errhandler/datawriter.go | 14 - pkg/errhandler/handler.go | 80 - pkg/errors/errors.go | 26 + pkg/errors/errors_suite_test.go | 13 + pkg/errors/import.go | 213 ++ pkg/errors/import_test.go | 190 ++ pkg/errors/wrapper.go | 28 - pkg/importer/importer.go | 110 + pkg/importer/importer_mock.go | 78 + pkg/importer/importer_suite_test.go | 13 + pkg/importer/importer_test.go | 138 ++ pkg/logger/adapter.go | 31 - pkg/logger/field.go | 21 + pkg/logger/field_test.go | 28 + pkg/logger/level.go | 45 + pkg/logger/level_test.go | 31 + pkg/logger/logger.go | 134 +- pkg/logger/logger_suite_test.go | 13 + pkg/logger/logger_test.go | 23 + pkg/logger/nop.go | 21 + pkg/logger/nop_test.go | 28 + pkg/logger/option.go | 56 + pkg/logger/option_test.go | 62 + pkg/logger/zap.go | 148 ++ pkg/logger/zap_test.go | 348 ++++ pkg/manager/hooks.go | 22 + pkg/manager/manager.go | 417 ++++ pkg/manager/manager_mock.go | 113 ++ pkg/manager/manager_suite_test.go | 13 + pkg/manager/manager_test.go | 599 ++++++ pkg/picker/config.go | 44 +- pkg/picker/config_test.go | 1771 ++++++++--------- pkg/picker/converter-error.go | 11 - ...verter-default.go => converter_default.go} | 0 pkg/picker/converter_default_test.go | 32 + ...rter-function.go => converter_function.go} | 6 +- pkg/picker/converter_function_test.go | 38 + .../{converter-non.go => converter_non.go} | 0 pkg/picker/converter_non_test.go | 20 + .../{converter-null.go => converter_null.go} | 0 pkg/picker/converter_null_test.go | 60 + pkg/picker/converter_test.go | 212 ++ .../{converter-type.go => converter_type.go} | 41 +- pkg/picker/converter_type_test.go | 192 ++ pkg/picker/picker-concat.go | 47 - pkg/picker/picker-constant.go | 14 - pkg/picker/picker-index.go | 22 - pkg/picker/picker.go | 10 + pkg/picker/picker_concat.go | 86 + pkg/picker/picker_concat_test.go | 84 + pkg/picker/picker_constant.go | 11 + pkg/picker/picker_constant_test.go | 18 + pkg/picker/picker_index.go | 19 + pkg/picker/picker_index_test.go | 43 + pkg/picker/picker_suite_test.go | 13 + 
pkg/picker/picker_test.go | 197 ++ pkg/picker/value.go | 22 + pkg/reader/batch.go | 123 +- pkg/reader/batch_mock.go | 81 + pkg/reader/batch_test.go | 282 +++ pkg/reader/batchmgr.go | 344 ---- pkg/reader/csv.go | 99 + pkg/reader/csv_test.go | 219 ++ pkg/reader/option.go | 54 + pkg/reader/option_test.go | 27 + pkg/reader/reader.go | 183 +- pkg/reader/reader_suite_test.go | 13 + pkg/reader/record.go | 20 + pkg/reader/record_mock.go | 81 + pkg/reader/record_test.go | 33 + pkg/reader/testdata/local.csv | 4 + pkg/reader/testdata/local_delimiter.csv | 4 + pkg/reader/testdata/local_failed.csv | 4 + pkg/reader/testdata/local_withHeader.csv | 2 + .../testdata/local_withHeader_failed.csv | 1 + pkg/source/config.go | 45 + pkg/source/config_test.go | 82 + pkg/source/ftp.go | 91 + pkg/source/ftp_test.go | 238 +++ pkg/source/hdfs.go | 87 + pkg/source/hdfs_test.go | 155 ++ pkg/source/local.go | 61 + pkg/source/local_test.go | 64 + pkg/source/oss.go | 91 + pkg/source/oss_test.go | 207 ++ pkg/source/s3.go | 91 + pkg/source/s3_test.go | 105 + pkg/source/sftp.go | 133 ++ pkg/source/sftp_test.go | 435 ++++ pkg/source/source.go | 39 + pkg/source/source_mock.go | 158 ++ pkg/source/source_suite_test.go | 13 + pkg/source/source_test.go | 74 + pkg/source/testdata/local.txt | 1 + pkg/spec/base/builder.go | 15 + pkg/spec/base/builder_mock.go | 53 + pkg/spec/base/builder_test.go | 17 + pkg/spec/base/record.go | 6 + pkg/spec/base/spec.go | 1 + pkg/spec/base/spec_suite_test.go | 13 + pkg/spec/spec.go | 10 + pkg/spec/v3/edge.go | 278 +++ pkg/spec/v3/edge_test.go | 701 +++++++ pkg/spec/v3/graph.go | 122 ++ pkg/spec/v3/graph_test.go | 299 +++ pkg/spec/v3/node.go | 148 ++ pkg/spec/v3/node_test.go | 313 +++ pkg/spec/v3/nodeid.go | 86 + pkg/spec/v3/nodeid_test.go | 169 ++ pkg/spec/v3/prop.go | 121 ++ pkg/spec/v3/prop_test.go | 447 +++++ pkg/spec/v3/rank.go | 48 + pkg/spec/v3/rank_test.go | 86 + pkg/spec/v3/record.go | 8 + pkg/spec/v3/spec.go | 7 + pkg/spec/v3/spec_suite_test.go | 13 + pkg/spec/v3/value.go | 69 + pkg/spec/v3/value_test.go | 70 + pkg/stats/concurrency_stats.go | 74 + pkg/stats/concurrency_stats_test.go | 106 + pkg/stats/stats.go | 74 + pkg/stats/stats_suite_test.go | 13 + pkg/stats/stats_test.go | 35 + pkg/stats/statsmgr.go | 145 -- pkg/utils/path.go | 11 + pkg/utils/path_test.go | 54 + pkg/utils/string.go | 18 + pkg/utils/string_test.go | 119 ++ pkg/utils/testdata/file10 | 0 pkg/utils/testdata/file11 | 0 pkg/utils/testdata/file20 | 0 pkg/utils/utils.go | 2 + pkg/utils/utils_suite_test.go | 13 + pkg/utils/waitgroupmap.go | 132 ++ pkg/utils/waitgroupmap_test.go | 81 + pkg/version/version.go | 53 +- pkg/version/version_suite_test.go | 13 + pkg/version/version_test.go | 54 + pkg/web/httpserver.go | 237 --- pkg/web/taskmgr.go | 52 - 281 files changed, 18324 insertions(+), 9234 deletions(-) delete mode 100644 .dockerignore delete mode 100644 .editorconfig create mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/package.yaml delete mode 100644 .github/workflows/pull_request.yaml delete mode 100644 .github/workflows/release.yaml create mode 100644 .golangci.yml create mode 100644 .goreleaser.yaml delete mode 100644 README_zh-CN.md delete mode 100755 ci/bootstrap.sh delete mode 100644 cmd/importer.go create mode 100644 cmd/nebula-importer/main.go delete mode 100644 docker-compose.yaml delete mode 100644 docs/configuration-reference.md delete mode 100644 docs/golang-install-en.md delete mode 100644 docs/golang-install.md create mode 100644 examples/basic/README.md create mode 100644 
examples/basic/basic.int.v3.yaml create mode 100644 examples/basic/basic.string.v3.yaml create mode 100644 examples/basic/knows.csv create mode 100644 examples/basic/person.csv create mode 100644 examples/hdfs/hdfs.v3.yaml rename examples/{sf => ldbc-snb}/.gitignore (60%) rename examples/{sf => ldbc-snb}/README.md (100%) create mode 100644 examples/ldbc-snb/ldbc-snb.v3.yaml create mode 100644 examples/s3/s3.v3.yaml delete mode 100644 examples/sf/sf.yaml delete mode 100644 examples/v1/choose-hex.csv delete mode 100644 examples/v1/choose.csv delete mode 100644 examples/v1/course-with-header.csv delete mode 100644 examples/v1/course.csv delete mode 100644 examples/v1/example.yaml delete mode 100644 examples/v1/follow-delimiter.csv delete mode 100644 examples/v1/follow-with-header.csv delete mode 100644 examples/v1/follow-with-label-and-str-vid.csv delete mode 100644 examples/v1/follow-with-label.csv delete mode 100644 examples/v1/follow.csv delete mode 100644 examples/v1/student-with-label-and-str-vid.csv delete mode 100644 examples/v1/student.csv delete mode 100644 examples/v2/basic_type_test.csv delete mode 100644 examples/v2/choose.csv delete mode 100644 examples/v2/course-lazy-quotes.csv delete mode 100644 examples/v2/course-with-header.csv delete mode 100644 examples/v2/course.csv delete mode 100644 examples/v2/data/course.csv delete mode 100644 examples/v2/date_test.csv delete mode 100644 examples/v2/example.yaml delete mode 100644 examples/v2/example_with_working_dir.yaml delete mode 100644 examples/v2/follow-delimiter.csv delete mode 100644 examples/v2/follow-with-header.csv delete mode 100644 examples/v2/follow-with-label-and-str-vid.csv delete mode 100644 examples/v2/follow-with-label.csv delete mode 100644 examples/v2/follow.csv delete mode 100644 examples/v2/geography_test.csv delete mode 100644 examples/v2/glob-follow-1.csv delete mode 100644 examples/v2/glob-follow-2.csv delete mode 100644 examples/v2/student-with-label-and-str-vid.csv delete mode 100644 examples/v2/student.csv delete mode 100644 pkg/base/stats.go delete mode 100644 pkg/base/stoppable.go delete mode 100644 pkg/base/tools.go delete mode 100644 pkg/base/tools_test.go delete mode 100644 pkg/base/types.go create mode 100644 pkg/bytebufferpool/bytebufferpool.go create mode 100644 pkg/bytebufferpool/bytebufferpool_suite_test.go create mode 100644 pkg/bytebufferpool/bytebufferpool_test.go create mode 100644 pkg/client/client.go create mode 100644 pkg/client/client_mock.go create mode 100644 pkg/client/client_suite_test.go create mode 100644 pkg/client/client_test.go delete mode 100644 pkg/client/clientmgr.go delete mode 100644 pkg/client/clientpool.go create mode 100644 pkg/client/errors.go create mode 100644 pkg/client/logger.go create mode 100644 pkg/client/logger_test.go create mode 100644 pkg/client/option.go create mode 100644 pkg/client/option_test.go create mode 100644 pkg/client/pool.go create mode 100644 pkg/client/pool_mock.go create mode 100644 pkg/client/pool_test.go create mode 100644 pkg/client/response.go create mode 100644 pkg/client/response_mock.go create mode 100644 pkg/client/response_v3.go create mode 100644 pkg/client/response_v3_test.go create mode 100644 pkg/client/session.go create mode 100644 pkg/client/session_mock.go create mode 100644 pkg/client/session_v3.go create mode 100644 pkg/client/session_v3_test.go create mode 100644 pkg/cmd/common/io.go create mode 100644 pkg/cmd/nebula-importer.go create mode 100644 pkg/cmd/nebula-importer_suite_test.go create mode 100644 
pkg/cmd/nebula-importer_test.go delete mode 100644 pkg/cmd/runner.go create mode 100644 pkg/cmd/testdata/build-failed.yaml create mode 100644 pkg/cmd/testdata/edge1.csv create mode 100644 pkg/cmd/testdata/nebula-importer.v3.yaml create mode 100644 pkg/cmd/testdata/node1.csv create mode 100644 pkg/cmd/testdata/optimize-failed.yaml create mode 100644 pkg/cmd/util/cmd.go create mode 100644 pkg/cmd/util/cmd_test.go create mode 100644 pkg/cmd/util/error.go create mode 100644 pkg/cmd/util/error_test.go create mode 100644 pkg/cmd/util/util.go create mode 100644 pkg/cmd/util/util_suite_test.go create mode 100644 pkg/config/base/client.go create mode 100644 pkg/config/base/client_test.go create mode 100644 pkg/config/base/config.go create mode 100644 pkg/config/base/config_suite_test.go create mode 100644 pkg/config/base/log.go create mode 100644 pkg/config/base/log_test.go create mode 100644 pkg/config/base/manager.go create mode 100644 pkg/config/base/source.go create mode 100644 pkg/config/base/source_test.go create mode 100644 pkg/config/config_suite_test.go create mode 100644 pkg/config/testdata/nebula-importer.v3.yaml delete mode 100644 pkg/config/testdata/test-parse-after-period.yaml delete mode 100644 pkg/config/testdata/test-parse-concat-items.yaml delete mode 100644 pkg/config/testdata/test-parse-log-path.yaml delete mode 100644 pkg/config/testdata/test-parse-no-files.yaml delete mode 100644 pkg/config/testdata/test-parse-version.yaml create mode 100644 pkg/config/v3/config.go create mode 100644 pkg/config/v3/config_suite_test.go create mode 100644 pkg/config/v3/config_test.go create mode 100644 pkg/config/v3/manager.go create mode 100644 pkg/config/v3/manager_test.go create mode 100644 pkg/config/v3/source.go create mode 100644 pkg/config/v3/source_test.go create mode 100644 pkg/config/v3/testdata/file10 create mode 100644 pkg/config/v3/testdata/file11 create mode 100644 pkg/config/v3/testdata/file20 delete mode 100644 pkg/csv/csv_test.go delete mode 100644 pkg/csv/errwriter.go delete mode 100644 pkg/csv/reader.go delete mode 100644 pkg/errhandler/datawriter.go delete mode 100644 pkg/errhandler/handler.go create mode 100644 pkg/errors/errors.go create mode 100644 pkg/errors/errors_suite_test.go create mode 100644 pkg/errors/import.go create mode 100644 pkg/errors/import_test.go delete mode 100644 pkg/errors/wrapper.go create mode 100644 pkg/importer/importer.go create mode 100644 pkg/importer/importer_mock.go create mode 100644 pkg/importer/importer_suite_test.go create mode 100644 pkg/importer/importer_test.go delete mode 100644 pkg/logger/adapter.go create mode 100644 pkg/logger/field.go create mode 100644 pkg/logger/field_test.go create mode 100644 pkg/logger/level.go create mode 100644 pkg/logger/level_test.go create mode 100644 pkg/logger/logger_suite_test.go create mode 100644 pkg/logger/logger_test.go create mode 100644 pkg/logger/nop.go create mode 100644 pkg/logger/nop_test.go create mode 100644 pkg/logger/option.go create mode 100644 pkg/logger/option_test.go create mode 100644 pkg/logger/zap.go create mode 100644 pkg/logger/zap_test.go create mode 100644 pkg/manager/hooks.go create mode 100644 pkg/manager/manager.go create mode 100644 pkg/manager/manager_mock.go create mode 100644 pkg/manager/manager_suite_test.go create mode 100644 pkg/manager/manager_test.go delete mode 100644 pkg/picker/converter-error.go rename pkg/picker/{converter-default.go => converter_default.go} (100%) create mode 100644 pkg/picker/converter_default_test.go rename pkg/picker/{converter-function.go => 
converter_function.go} (86%) create mode 100644 pkg/picker/converter_function_test.go rename pkg/picker/{converter-non.go => converter_non.go} (100%) create mode 100644 pkg/picker/converter_non_test.go rename pkg/picker/{converter-null.go => converter_null.go} (100%) create mode 100644 pkg/picker/converter_null_test.go create mode 100644 pkg/picker/converter_test.go rename pkg/picker/{converter-type.go => converter_type.go} (79%) create mode 100644 pkg/picker/converter_type_test.go delete mode 100644 pkg/picker/picker-concat.go delete mode 100644 pkg/picker/picker-constant.go delete mode 100644 pkg/picker/picker-index.go create mode 100644 pkg/picker/picker_concat.go create mode 100644 pkg/picker/picker_concat_test.go create mode 100644 pkg/picker/picker_constant.go create mode 100644 pkg/picker/picker_constant_test.go create mode 100644 pkg/picker/picker_index.go create mode 100644 pkg/picker/picker_index_test.go create mode 100644 pkg/picker/picker_suite_test.go create mode 100644 pkg/picker/picker_test.go create mode 100644 pkg/reader/batch_mock.go create mode 100644 pkg/reader/batch_test.go delete mode 100644 pkg/reader/batchmgr.go create mode 100644 pkg/reader/csv.go create mode 100644 pkg/reader/csv_test.go create mode 100644 pkg/reader/option.go create mode 100644 pkg/reader/option_test.go create mode 100644 pkg/reader/reader_suite_test.go create mode 100644 pkg/reader/record.go create mode 100644 pkg/reader/record_mock.go create mode 100644 pkg/reader/record_test.go create mode 100644 pkg/reader/testdata/local.csv create mode 100644 pkg/reader/testdata/local_delimiter.csv create mode 100644 pkg/reader/testdata/local_failed.csv create mode 100644 pkg/reader/testdata/local_withHeader.csv create mode 100644 pkg/reader/testdata/local_withHeader_failed.csv create mode 100644 pkg/source/config.go create mode 100644 pkg/source/config_test.go create mode 100644 pkg/source/ftp.go create mode 100644 pkg/source/ftp_test.go create mode 100644 pkg/source/hdfs.go create mode 100644 pkg/source/hdfs_test.go create mode 100644 pkg/source/local.go create mode 100644 pkg/source/local_test.go create mode 100644 pkg/source/oss.go create mode 100644 pkg/source/oss_test.go create mode 100644 pkg/source/s3.go create mode 100644 pkg/source/s3_test.go create mode 100644 pkg/source/sftp.go create mode 100644 pkg/source/sftp_test.go create mode 100644 pkg/source/source.go create mode 100644 pkg/source/source_mock.go create mode 100644 pkg/source/source_suite_test.go create mode 100644 pkg/source/source_test.go create mode 100644 pkg/source/testdata/local.txt create mode 100644 pkg/spec/base/builder.go create mode 100644 pkg/spec/base/builder_mock.go create mode 100644 pkg/spec/base/builder_test.go create mode 100644 pkg/spec/base/record.go create mode 100644 pkg/spec/base/spec.go create mode 100644 pkg/spec/base/spec_suite_test.go create mode 100644 pkg/spec/spec.go create mode 100644 pkg/spec/v3/edge.go create mode 100644 pkg/spec/v3/edge_test.go create mode 100644 pkg/spec/v3/graph.go create mode 100644 pkg/spec/v3/graph_test.go create mode 100644 pkg/spec/v3/node.go create mode 100644 pkg/spec/v3/node_test.go create mode 100644 pkg/spec/v3/nodeid.go create mode 100644 pkg/spec/v3/nodeid_test.go create mode 100644 pkg/spec/v3/prop.go create mode 100644 pkg/spec/v3/prop_test.go create mode 100644 pkg/spec/v3/rank.go create mode 100644 pkg/spec/v3/rank_test.go create mode 100644 pkg/spec/v3/record.go create mode 100644 pkg/spec/v3/spec.go create mode 100644 pkg/spec/v3/spec_suite_test.go create mode 100644 
pkg/spec/v3/value.go create mode 100644 pkg/spec/v3/value_test.go create mode 100644 pkg/stats/concurrency_stats.go create mode 100644 pkg/stats/concurrency_stats_test.go create mode 100644 pkg/stats/stats.go create mode 100644 pkg/stats/stats_suite_test.go create mode 100644 pkg/stats/stats_test.go delete mode 100644 pkg/stats/statsmgr.go create mode 100644 pkg/utils/path.go create mode 100644 pkg/utils/path_test.go create mode 100644 pkg/utils/string_test.go create mode 100644 pkg/utils/testdata/file10 create mode 100644 pkg/utils/testdata/file11 create mode 100644 pkg/utils/testdata/file20 create mode 100644 pkg/utils/utils.go create mode 100644 pkg/utils/utils_suite_test.go create mode 100644 pkg/utils/waitgroupmap.go create mode 100644 pkg/utils/waitgroupmap_test.go create mode 100644 pkg/version/version_suite_test.go create mode 100644 pkg/version/version_test.go delete mode 100644 pkg/web/httpserver.go delete mode 100644 pkg/web/taskmgr.go diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index c6516aeb..00000000 --- a/.dockerignore +++ /dev/null @@ -1,4 +0,0 @@ -example - -LICENSE -*.md diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index b4195bb8..00000000 --- a/.editorconfig +++ /dev/null @@ -1,12 +0,0 @@ -root = true - -[*] -insert_final_newline = true -charset = utf-8 -trim_trailing_whitespace = true -indent_style = space -indent_size = 2 - -[{Makefile,go.mod,go.sum,*.go}] -indent_style = tab -indent_size = 2 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..d431e9e9 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,50 @@ +name: build + +on: + push: + branches: + - 'master' + tags: + - 'v*' + +permissions: + contents: write + +jobs: + build: + runs-on: [self-hosted, nebula-fast] + container: + image: reg.vesoft-inc.com/ci/ubuntu:20.04-docker + volumes: + - /var/run/docker.sock:/var/run/docker.sock + steps: + - uses: webiny/action-post-run@2.0.1 + with: + run: sh -c "find . 
-mindepth 1 -delete" + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: actions/setup-go@v3 + with: + go-version: 1.19 + cache: true + - run: | + go env -w GOFLAGS=-buildvcs=false + git config --global --add safe.directory $(pwd) + - run: make lint + - run: make test + - uses: codecov/codecov-action@v2 + - uses: docker/login-action@v1 + if: success() && startsWith(github.ref, 'refs/tags/') + with: + registry: ${{ secrets.HARBOR_REGISTRY }} + username: ${{ secrets.HARBOR_USERNAME }} + password: ${{ secrets.HARBOR_PASSWORD }} + - uses: goreleaser/goreleaser-action@v2 + if: success() && startsWith(github.ref, 'refs/tags/') + with: + version: latest + distribution: goreleaser + args: release --rm-dist + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/package.yaml b/.github/workflows/package.yaml deleted file mode 100644 index 3b99c85b..00000000 --- a/.github/workflows/package.yaml +++ /dev/null @@ -1,67 +0,0 @@ -name: package - -on: - push: - branches: - - master - -jobs: - package: - name: package and upload artifacts - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - os: - - linux - - darwin - - windows - arch: - - amd64 - - arm64 - - arm - exclude: - - os: darwin - arch: arm - - os: windows - arch: arm64 - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-go@v1 - with: - go-version: 1.16.4 - - name: set package name - id: package-name - run: | - output_name=nebula-importer-${{ matrix.os }}-${{ matrix.arch }}-latest - if [ "${{ matrix.os }}" = "windows" ]; then - output_name+='.exe' - fi - echo "::set-output name=package_name::$output_name" - - name: build - env: - GOOS: ${{ matrix.os }} - GOARCH: ${{ matrix.arch }} - CGO_ENABLED: 0 - run: | - cd cmd - go build -o ../${{ steps.package-name.outputs.package_name }} - - uses: actions/upload-artifact@v2 - with: - name: ${{ steps.package-name.outputs.package_name }} - path: ./${{ steps.package-name.outputs.package_name }} - - docker: - name: build docker image - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Build and push Docker images - uses: docker/build-push-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - repository: vesoft/nebula-importer - tags: latest, nightly - tag_with_ref: true - add_git_labels: true diff --git a/.github/workflows/pull_request.yaml b/.github/workflows/pull_request.yaml deleted file mode 100644 index 6c79b04a..00000000 --- a/.github/workflows/pull_request.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: pull_request - -on: - push: - branches: [ master, 'release-**'] - pull_request: - branches: [ master, 'release-**'] - schedule: - - cron: "0 6 * * *" - -jobs: - test: - name: test with nebula docker compose - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: ut test - run: make gotest - - name: test importer - run: docker-compose up --exit-code-from importer - timeout-minutes: 20 - - name: cleanup - if: ${{ always() }} - run: docker-compose down -v diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml deleted file mode 100644 index bcd8d7b1..00000000 --- a/.github/workflows/release.yaml +++ /dev/null @@ -1,72 +0,0 @@ -name: release - -on: - release: - types: - - published - -jobs: - package: - name: package and upload release assets - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - os: - - linux - - darwin - - windows - arch: - - amd64 - - arm64 - - arm - exclude: - - os: darwin - arch: arm - - os: windows - 
arch: arm64 - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-go@v1 - with: - go-version: 1.16.4 - - name: set package name - id: package-name - run: | - output_name=nebula-importer-${{ matrix.os }}-${{ matrix.arch }}-${GITHUB_REF#refs/tags/} - if [ "${{ matrix.os }}" = "windows" ]; then - output_name+='.exe' - fi - echo "::set-output name=package_name::$output_name" - - name: build - env: - GOOS: ${{ matrix.os }} - GOARCH: ${{ matrix.arch }} - CGO_ENABLED: 0 - run: | - cd cmd - go build -o ../${{ steps.package-name.outputs.package_name }} - - name: upload release asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ github.event.release.upload_url }} - asset_path: ./${{ steps.package-name.outputs.package_name }} - asset_name: ${{ steps.package-name.outputs.package_name }} - asset_content_type: application/octet-stream - - docker: - name: build docker image - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Build and push Docker images - uses: docker/build-push-action@v1 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - repository: vesoft/nebula-importer - tags: latest - tag_with_ref: true - add_git_labels: true diff --git a/.gitignore b/.gitignore index bff25cbd..8b8b5d5f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,25 +1,16 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out +# editor and IDE paraphernalia +.idea +vendor +*.swp +*.swo *.log - -# macOS +*.gz +*.tgz +*~ +bin .DS_Store +*.out +*.log +dist/ -err/ -vendor/ -nebula-importer coverage.* - -# IDE -.vscode/ -.idea/ diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 00000000..2df64471 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,246 @@ +linters-settings: + depguard: + list-type: blacklist + packages: + # logging is allowed only by logutils.Log, logrus + # is allowed to use only in logutils package + - github.com/sirupsen/logrus + packages-with-error-message: + - github.com/sirupsen/logrus: "logging is allowed only by logutils.Log" + dupl: + threshold: 100 + gci: + local-prefixes: github.com/vesoft-inc + goconst: + min-len: 2 + min-occurrences: 2 + gocritic: + enabled-tags: + - diagnostic + - experimental + - opinionated + - performance + - style + disabled-checks: + - dupImport # https://github.com/go-critic/go-critic/issues/845 + - ifElseChain + - octalLiteral + - whyNoLint + - wrapperFunc + # https://github.com/go-critic/go-critic/blob/master/docs/overview.md#sloppyreassign + # Conflict with go vet --shadow + - sloppyReassign + gocyclo: + min-complexity: 20 + goimports: + local-prefixes: github.com/vesoft-inc + + gomnd: + settings: + mnd: + # don't include the "operation" and "assign" + checks: argument,case,condition,return + govet: + check-shadowing: true + settings: + printf: + funcs: + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + lll: + line-length: 140 + maligned: + suggest-new: true + misspell: + locale: US + nolintlint: + allow-leading-space: true # don't require machine-readable nolint directives (i.e. 
with no leading space) + allow-unused: false # report any unused nolint directives + require-explanation: false # don't require an explanation for nolint directives + require-specific: false # don't require nolint directives to be specific about which linter is being skipped + stylecheck: + checks: + - all + - '-ST1000' + - '-ST1003' + - '-ST1016' + - '-ST1020' + - '-ST1021' + - '-ST1022' + - '-ST1023' + dot-import-whitelist: [] + http-status-code-whitelist: [] + revive: + ignore-generated-header: true + severity: error + rules: + - name: context-keys-type + - name: time-naming + - name: var-declaration + - name: unexported-return + - name: errorf + # - name: blank-imports + - name: context-as-argument + - name: dot-imports + - name: error-return + - name: error-strings + - name: error-naming + # - name: exported # TODO: remove the comment exclude, and add comments + - name: if-return + - name: increment-decrement + - name: var-naming + arguments: [["ID", "SQL", "URL","RAM","RPC"],[]] + # - name: package-comments # TODO: remove the comment exclude, and add comments + - name: range + - name: receiver-naming + - name: indent-error-flow + - name: argument-limit + arguments: [5] + - name: cyclomatic + arguments: [20] + - name: max-public-structs + arguments: [20] + # - name: file-header # TODO: Add licence file header + # arguments: ["/*\nCopyright 2021 Vesoft Inc."] + - name: empty-block + - name: superfluous-else + - name: confusing-naming + # - name: get-return + - name: modifies-parameter + - name: confusing-results + - name: deep-exit + - name: unused-parameter + - name: unreachable-code + # - name: add-constant + - name: flag-parameter + - name: unnecessary-stmt + - name: struct-tag + - name: modifies-value-receiver + - name: constant-logical-expr + - name: bool-literal-in-expr + - name: redefines-builtin-id + - name: function-result-limit + arguments: [ 5 ] + - name: imports-blacklist + arguments: [] + - name: range-val-in-closure + - name: range-val-address + - name: waitgroup-by-value + - name: atomic + - name: empty-lines + - name: line-length-limit + arguments: [ 140 ] + - name: call-to-gc + - name: duplicated-imports + - name: import-shadowing + - name: bare-return + - name: unused-receiver + - name: unhandled-error + arguments: ["fmt.Printf"] + - name: cognitive-complexity + arguments: [50] + - name: string-of-int + # - name: string-format + # arguments: [] + - name: early-return + - name: unconditional-recursion + - name: identical-branches + - name: defer + arguments: [["call-chain","loop","method-call","recover", "return"]] + - name: unexported-naming + - name: function-length + arguments: [60, 120] +linters: + disable-all: true + enable: + - bodyclose + - deadcode + - depguard + - dogsled + - dupl + - errcheck + - exportloopref + - exhaustive + # - funlen + - gochecknoinits + - goconst + - gocritic + # - gocyclo + - gofmt + # - goimports + - goprintffuncname + - gosec + - gosimple + - govet + - ineffassign + - lll + - misspell + - nakedret + - noctx + - nolintlint + - rowserrcheck + - staticcheck + - structcheck + - stylecheck + - revive + - typecheck + - unconvert + - unparam + - unused + - varcheck + - whitespace + + # don't enable: + # - asciicheck + # - gomnd + # - scopelint + # - gochecknoglobals + # - gocognit + # - godot + # - godox + # - goerr113 + # - interfacer + # - maligned + # - nestif + # - prealloc + # - testpackage + # - wsl + +issues: + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + - path: _test\.go + linters: + - 
gomnd + - dupl + - revive + - gochecknoinits + - lll + - errcheck + - goconst + - path: _test\.go + text: "G404: Use of weak random number generator" + linters: + - gosec + - path: _test\.go + text: "shadow: declaration of" + linters: + - govet + - source: go:generate + text: "line is [\\d]+ characters" + linters: + - lll + - revive + + - source: "// [@]?doc[s]?[ ]?:" # for docs comment + text: "line is [\\d]+ characters" + linters: + - lll + - revive +run: + skip-dirs: + - deploy + - docs + - scripts diff --git a/.goreleaser.yaml b/.goreleaser.yaml new file mode 100644 index 00000000..aaa13661 --- /dev/null +++ b/.goreleaser.yaml @@ -0,0 +1,115 @@ +project_name: nebula-importer + +release: + name_template: "NebulaGraph Importer {{.Version}}" + +before: + hooks: + - go mod tidy + +builds: + - env: + - CGO_ENABLED=0 + main: ./cmd/nebula-importer + binary: nebula-importer + flags: + - -trimpath + ldflags: + - -s -w + - -X github.com/vesoft-inc/nebula-importer/v4/pkg/version.buildVersion={{ .Version }} + - -X github.com/vesoft-inc/nebula-importer/v4/pkg/version.buildCommit={{ .FullCommit }} + - -X github.com/vesoft-inc/nebula-importer/v4/pkg/version.buildCommitDate={{ .CommitDate }} + - -X github.com/vesoft-inc/nebula-importer/v4/pkg/version.buildDate={{ .Date }} + goos: + - linux + - windows + - darwin + goarch: + - "386" + - amd64 + - arm + - arm64 + goarm: + - "7" + +archives: + - name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + replacements: + darwin: Darwin + linux: Linux + windows: Windows + 386: i386 + amd64: x86_64 + files: + - README.md + format_overrides: + - goos: windows + format: zip + +dockers: + - &dockers + image_templates: + - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-amd64" + use: buildx + dockerfile: Dockerfile.goreleaser + build_flag_templates: + - "--platform=linux/amd64" + - "--pull" + - "--label=org.opencontainers.image.title={{ .ProjectName }}" + - "--label=org.opencontainers.image.description={{ .ProjectName }}" + - "--label=org.opencontainers.image.url=https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer" + - "--label=org.opencontainers.image.source=https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer" + - "--label=org.opencontainers.image.version={{ .Version }}" + - "--label=org.opencontainers.image.created={{ .Date }}" + - "--label=org.opencontainers.image.revision={{ .FullCommit }}" + - <<: *dockers + image_templates: + - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-armv7" + goarch: arm + goarm: 7 + build_flag_templates: + - "--platform=linux/arm/v7" + - "--pull" + - "--label=org.opencontainers.image.title={{ .ProjectName }}" + - "--label=org.opencontainers.image.description={{ .ProjectName }}" + - "--label=org.opencontainers.image.url=https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer" + - "--label=org.opencontainers.image.source=https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer" + - "--label=org.opencontainers.image.version={{ .Version }}" + - "--label=org.opencontainers.image.created={{ .Date }}" + - "--label=org.opencontainers.image.revision={{ .FullCommit }}" + - <<: *dockers + image_templates: + - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-arm64v8" + goarch: arm64 + build_flag_templates: + - "--platform=linux/arm64/v8" + - "--pull" + - "--label=org.opencontainers.image.title={{ .ProjectName }}" + - "--label=org.opencontainers.image.description={{ .ProjectName }}" + - 
"--label=org.opencontainers.image.url=https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer" + - "--label=org.opencontainers.image.source=https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer" + - "--label=org.opencontainers.image.version={{ .Version }}" + - "--label=org.opencontainers.image.created={{ .Date }}" + - "--label=org.opencontainers.image.revision={{ .FullCommit }}" +docker_manifests: + - &docker_manifests + name_template: "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:latest" + image_templates: + - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-amd64" + - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-armv7" + - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-arm64v8" + - <<: *docker_manifests + name_template: "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}" + +nfpms: +- file_name_template: '{{ .ConventionalFileName }}' + id: packages + homepage: https://github.com/vesoft-inc/nebula-ng-tools/tree/master/importer + description: NebulaGraph Black Importer. + maintainer: Vee Zhang + vendor: Vesoft Inc. + bindir: /usr/bin + formats: + - deb + - rpm + - apk diff --git a/Dockerfile b/Dockerfile index 2f9f0275..ce77cfb8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,21 +1,24 @@ -FROM golang:1.13.2-alpine as builder +FROM reg.vesoft-inc.com/ci/golang:1.18-alpine AS builder -ENV GO111MODULE on -ENV GOPROXY https://goproxy.cn -ENV NEBULA_IMPORTER /home/nebula-importer +LABEL stage=gobuilder -COPY . ${NEBULA_IMPORTER} +ENV CGO_ENABLED 0 +ENV GOOS linux +WORKDIR /build/zero -WORKDIR ${NEBULA_IMPORTER} +ADD go.mod . +ADD go.sum . +COPY pkg pkg +COPY cmd cmd +RUN go mod download -RUN cd cmd \ - && go build -o target/nebula-importer \ - && cp target/nebula-importer /usr/local/bin/nebula-importer +RUN go build -ldflags="-s -w" -o /usr/bin/nebula-importer ./cmd/nebula-importer -FROM alpine +FROM reg.vesoft-inc.com/ci/alpine -COPY --from=builder /usr/local/bin/nebula-importer /usr/local/bin/nebula-importer +RUN apk update --no-cache && apk add --no-cache ca-certificates tzdata +ENV TZ Asia/Shanghai -WORKDIR /root +COPY --from=builder /usr/bin/nebula-importer /usr/bin/nebula-importer -ENTRYPOINT ["nebula-importer"] +ENTRYPOINT ["/usr/bin/nebula-importer"] diff --git a/Makefile b/Makefile index 7b69bd15..469a0a1c 100644 --- a/Makefile +++ b/Makefile @@ -1,36 +1,75 @@ +DOCKER_REGISTRY ?= localhost:5000 +DOCKER_REPO ?= ${DOCKER_REGISTRY}/vesoft +IMAGE_TAG ?= latest -.PHONY: test build clean +export GO111MODULE := on +GOENV := GO15VENDOREXPERIMENT="1" CGO_ENABLED=0 +GO := $(GOENV) go +GO_BUILD := $(GO) build -trimpath +# Get the currently used golang install path (in GOPATH/bin, unless GOBIN is set) +ifeq (,$(shell go env GOBIN)) +GOBIN=$(shell go env GOPATH)/bin +else +GOBIN=$(shell go env GOBIN) +endif -default: build +all: build -build: clean fmt - @cd cmd; \ - CGO_ENABLED=0 go build -ldflags "\ - -X 'github.com/vesoft-inc/nebula-importer/v3/pkg/version.GoVersion=$(shell go version)' \ - -X 'github.com/vesoft-inc/nebula-importer/v3/pkg/version.GitHash=$(shell git rev-parse HEAD)'\ - -X 'github.com/vesoft-inc/nebula-importer/v3/pkg/version.Tag=$(shell git describe --exact-match --abbrev=0 --tags | sed 's/^v//')'\ - " -o nebula-importer; \ - mv nebula-importer ..; - @echo "nebula-importer has been outputed to $$(pwd)/nebula-importer"; +go-generate: $(GOBIN)/mockgen + go generate ./... 
-vendor: clean fmt - @cd cmd; go mod vendor +check: tidy fmt vet imports lint -vendorbuild: vendor - @cd cmd; \ - CGO_ENABLED=0 go build -mod vendor -o nebula-importer; \ - mv nebula-importer ..; - @echo "nebula-importer has been outputed to $$(pwd)/nebula-importer"; +tidy: + go mod tidy -clean: - rm -rf nebula-importer; +fmt: $(GOBIN)/gofumpt + # go fmt ./... + $(GOBIN)/gofumpt -w -l ./ + +vet: + go vet ./... + +imports: $(GOBIN)/goimports $(GOBIN)/impi + $(GOBIN)/impi --local github.com/vesoft-inc --scheme stdLocalThirdParty \ + -ignore-generated ./... \ + || exit 1 + +lint: $(GOBIN)/golangci-lint + $(GOBIN)/golangci-lint run + +build: + $(GO_BUILD) -ldflags '$(LDFLAGS)' -o bin/nebula-importer ./cmd/nebula-importer/ test: - docker-compose up --exit-code-from importer; \ - docker-compose down -v; + go test -gcflags=all="-l" -race -coverprofile=coverage.txt -covermode=atomic ./pkg/... + +docker-build: + docker build -t "${DOCKER_REPO}/nebula-importer:${IMAGE_TAG}" -f Dockerfile . + +docker-push: docker-build + docker push "${DOCKER_REPO}/nebula-importer:${IMAGE_TAG}" + +tools: $(GOBIN)/goimports \ + $(GOBIN)/impi \ + $(GOBIN)/gofumpt \ + $(GOBIN)/golangci-lint \ + $(GOBIN)/mockgen + +$(GOBIN)/goimports: + go install golang.org/x/tools/cmd/goimports@v0.1.12 + +$(GOBIN)/impi: + go install github.com/pavius/impi/cmd/impi@v0.0.3 + +$(GOBIN)/gofumpt: + go install mvdan.cc/gofumpt@v0.3.1 -gotest: - go test -gcflags=all=-l -race -coverprofile=coverage.txt -covermode=atomic ./... +$(GOBIN)/golangci-lint: + @[ -f $(GOBIN)/golangci-lint ] || { \ + set -e ;\ + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v1.49.0 ;\ + } -fmt: - @go mod tidy && find . -path ./vendor -prune -o -type f -iname '*.go' -exec go fmt {} \; +$(GOBIN)/mockgen: + go install github.com/golang/mock/mockgen@v1.6.0 diff --git a/README.md b/README.md index 3ee79a70..1812e282 100644 --- a/README.md +++ b/README.md @@ -1,408 +1,14 @@ -
-<h1 align="center">Nebula Importer</h1>
-
+[![codecov.io](https://codecov.io/gh/vesoft-inc/nebula-importer/branch/master/graph/badge.svg)](https://codecov.io/gh/vesoft-inc/nebula-importer) +[![Go Report Card](https://goreportcard.com/badge/github.com/vesoft-inc/nebula-importer)](https://goreportcard.com/report/github.com/vesoft-inc/nebula-importer) +[![GolangCI](https://golangci.com/badges/github.com/vesoft-inc/nebula-importer.svg)](https://golangci.com/r/github.com/vesoft-inc/nebula-importer) +[![GoDoc](https://godoc.org/github.com/vesoft-inc/nebula-importer?status.svg)](https://godoc.org/github.com/vesoft-inc/nebula-importer) -[![test](https://github.com/vesoft-inc/nebula-importer/workflows/test/badge.svg)](https://github.com/vesoft-inc/nebula-importer/actions?workflow=test) +# What is NebulaGraph Importer? -## Introduction +**NebulaGraph Importer** is a tool to import data into [NebulaGraph](https://github.com/vesoft-inc/nebula). -Nebula Importer is a CSV importing tool for [Nebula Graph](https://github.com/vesoft-inc/nebula). It reads data in the local CSV files and imports data into Nebula Graph. +## Features -Before you start Nebula Importer, make sure that: +* Supports importing CSV files -* Nebula Graph is deployed. -* A schema, composed of space, tags, and edge types, is created. - -Currently, there are three methods to deploy Nebula Graph: - -1. [nebula-docker-compose](https://github.com/vesoft-inc/nebula-docker-compose "nebula-docker-compose") -2. [rpm package](https://docs.nebula-graph.io/3.1.0/4.deployment-and-installation/2.compile-and-install-nebula-graph/2.install-nebula-graph-by-rpm-or-deb/) -3. [from source](https://docs.nebula-graph.io/3.1.0/4.deployment-and-installation/2.compile-and-install-nebula-graph/1.install-nebula-graph-by-compiling-the-source-code/) - -> The quickest way to deploy Nebula Graph is using [Docker Compose](https://github.com/vesoft-inc/nebula-docker-compose). - -## **CAUTION**: Choose the correct branch - -The rpc protocols (i.e., thrift) in Nebula Graph 1.x, v2, v3 are incompatible. -Nebula Importer master and v3 branch can only connect to Nebula Graph 3.x. - -> Do not mismatch. - -## How to use - -After configuring the YAML file and preparing the CSV files to be imported, you can use this tool to batch write data to Nebula Graph. - -### From the source code - -Nebula Importer is compiled with Go **1.13** or later, so make sure that Go is installed on your system. See the Go [installation document](docs/golang-install-en.md) for the installation and configuration tutorial. - -1. Clone the repository - - * For Nebula Graph 3.x, clone the master branch. - - ``` bash - $ git clone https://github.com/vesoft-inc/nebula-importer.git - ``` - -2. Go to the `nebula-importer` directory. - -``` -$ cd nebula-importer -``` - -3. Build the source code. - -``` -$ make build -``` - -4. Start the service - -``` -$ ./nebula-importer --config /path/to/yaml/config/file -``` - -The `--config` option in the preceding command is used to pass the path of the YAML configuration file. - -### From Docker - -If you are using Docker, you don't have to install Go locally. Pull the [Docker image](https://hub.docker.com/r/vesoft/nebula-importer) for Nebula Importer. Mount the local configuration file and the CSV data files into the container and you are done. 
- -```bash -$ docker run --rm -ti \ - --network=host \ - -v {your-config-file}:{your-config-file} \ - -v {your-csv-data-dir}:{your-csv-data-dir} \ - vesoft/nebula-importer:{image_version} - --config {your-config-file} -``` - -- `{your-config-file}`: Replace with the absolute path of the local YAML configuration file. -- `{your-csv-data-dir}`: Replace with the absolute path of the local CSV data file. -- `{image_version}`: Replace with the image version you need(e.g. `v1`, `v2`, `v3`) -> **NOTE**: We recommend that you use the relative paths in the `files.path` file. If you use the local absolute path, check how the path is mapped to Docker carefully. - -## Prepare the configuration file - -Nebula Importer uses the YAML configuration file to store information for the CSV files and Nebula Graph server. Here's an [example for v2](examples/v2/example.yaml) and an [example for v1](examples/v1/example.yaml) for the configuration file and the CSV file. You can find the explanation for each option in the following: - -```yaml -version: v2 -description: example -removeTempFiles: false -``` - -* `version`: **Required**. Indicates the configuration file version, the default value is `v2`. Note that `v2` config can be used with both 2.x and 3.x Nebula service. -* `description`: **Optional**. Describes the configuration file. -* `removeTempFiles`: **Optional**. Whether to delete the temporarily generated log and error data files. The default value is `false`. -* `clientSettings`: Stores all the configurations related to the Nebula Graph service. - -```yaml -clientSettings: - retry: 3 - concurrency: 10 - channelBufferSize: 128 - space: test - connection: - user: user - password: password - address: 192.168.8.1:9669,192.168.8.2:9669 - postStart: - commands: | - UPDATE CONFIGS storage:wal_ttl=3600; - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = true }; - afterPeriod: 8s - preStop: - commands: | - UPDATE CONFIGS storage:wal_ttl=86400; - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = false }; -``` - -* `clientSettings.retry`: **Optional**. Shows the failed retrying times to execute nGQL queries in Nebula Graph client. -* `clientSettings.concurrency`: **Optional**. Shows the concurrency of Nebula Graph Client, i.e. the connection number between the Nebula Graph Client and the Nebula Graph Server. The default value is 10. -* `clientSettings.channelBufferSize`: **Optional**. Shows the buffer size of the cache queue for each Nebula Graph Client, the default value is 128. -* `clientSettings.space`: **Required**. Specifies which `space` the data is imported into. Do not import data to multiple spaces at the same time because it causes a performance problem. -* `clientSettings.connection`: **Required**. Configures the `user`, `password`, and `address` information for Nebula Graph Server. -* `clientSettings.postStart`: **Optional**. Stores the operations that are performed after the Nebula Graph Server is connected and before any data is inserted. - * `clientSettings.postStart.commands`: Defines some commands that will run when Nebula Graph Server is connected. - * `clientSettings.postStart.afterPeriod`: Defines the interval between running the preceding commands and inserting data to Nebula Graph Server. -* `clientSettings.preStop`: **Optional**. Configures the operations before disconnecting Nebula Graph Server. - * `clientSettings.preStop.commands`: Defines some command scripts before disconnecting Nebula Graph Server. 
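Taken together, the client options above form the first half of a working configuration. The sketch below assembles them into a single block for reference (illustrative only: the space name, credentials, addresses, and commands are the placeholder values used elsewhere in this README):

```yaml
version: v2
description: example
removeTempFiles: false
clientSettings:
  retry: 3                 # retry count for failed nGQL statements
  concurrency: 10          # client connections to the graph service
  channelBufferSize: 128   # cache queue size per client
  space: test              # target graph space
  connection:
    user: user
    password: password
    address: 192.168.8.1:9669,192.168.8.2:9669
  postStart:
    commands: |
      UPDATE CONFIGS storage:wal_ttl=3600;
    afterPeriod: 8s        # pause between these commands and the first insert
  preStop:
    commands: |
      UPDATE CONFIGS storage:wal_ttl=86400;
```

The `files` section described next supplies the other half: the data sources and their schema mappings.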
- -### Files - -The following three configurations are related to the log and data files: - -* `workingDir`: **Optional**. If you have multiple directories containing data with the same file structure, you can use this parameter to switch between them. For example, the value of `path` and `failDataPath` of the configuration below will be automatically changed to `./data/student.csv` and `./data/err/student`. If you change workingDir to `./data1`, the path will be changed accordingly. The param can be either absolute or relative. -* `logPath`: **Optional**. Specifies the log path when importing data. The default path is `/tmp/nebula-importer-{timestamp}.log`. -* `files`: **Required**. It is an array type to configure different data files. You can also import data from a HTTP link by inputting the link in the file path. - -```yaml -workingDir: ./data/ -logPath: ./err/test.log -files: - - path: ./student.csv - failDataPath: ./err/student - batchSize: 128 - limit: 10 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - lazyQuotes: false -``` - -#### CSV data files - -One CSV file can only store one type of vertex or edge. Vertices and edges of the different schema must be stored in different files. - -* `path`: **Required**. Specifies the path where the data files are stored. If a relative path is used, the `path` and current configuration file directory are spliced. Wildcard filename is also supported, for example: `./follower-*.csv`, please make sure that all matching files with the same schema. -* `failDataPath`: **Required**. Specifies the directory for data that failed in inserting so that the failed data are reinserted. -* `batchSize`: **Optional**. Specifies the batch size of the inserted data. The default value is 128. -* `limit`: **Optional**. Limits the max data reading rows. -* `inOrder`: **Optional**. Whether to insert the data rows in the file in order. If you do not specify it, you avoid the decrease in importing rate caused by the data skew. - -* `type & csv`: **Required**. Specifies the file type. Currently, only CSV is supported. Specify whether to include the header and the inserted and deleted labels in the CSV file. - * `withHeader`: The default value is false. The format of the header is described in the following section. - * `withLabel`: The default value is false. The format of the label is described in the following section. - * `delimiter`: **Optional**. Specify the delimiter for the CSV files. The default value is `","`. And only a 1-character string delimiter is supported. - * `lazyQuotes`: **Optional**. If `lazyQuotes` is true, a quote may appear in an unquoted field and a non-doubled quote may appear in a quoted field. - -#### `schema` - -**Required**. Describes the metadata information for the current data file. The `schema.type` has only two values: vertex and edge. - -* When type is set to vertex, details must be described in the vertex field. -* When type is set to edge, details must be described in edge field. - -##### `schema.vertex` - -**Required**. Describes the schema information for vertices. For example, tags. 
- -```yaml -schema: - type: vertex - vertex: - vid: - index: 1 - function: hash - prefix: abc - tags: - - name: student - props: - - name: age - type: int - index: 2 - - name: name - type: string - index: 1 - - name: gender - type: string - defaultValue: "male" - - name: phone - type: string - nullable: true - - name: email - type: string - nullable: true - nullValue: "__NULL__" - - name: address - type: string - nullable: true - alternativeIndices: - - 7 - - 8 - -# concatItems examples -schema: - type: vertex - vertex: - vid: - concatItems: - - "abc" - - 1 - function: hash -``` - -##### `schema.vertex.vid` - -**Optional**. Describes the vertex ID column and the function used for the vertex ID. - -* `index`: **Optional**. The column number in the CSV file. Started with 0. The default value is 0. -* `concatItems`: **Optional**. The concat item can be `string`, `int` or mixed. `string` represents a constant, and `int` represents an index column. Then connect all items.If set, the above `index` will have no effect. -* `function`: **Optional**. Functions to generate the VIDs. Currently, we only support function `hash`. -* `type`: **Optional**. The type for VIDs. The default value is `string`. -* `prefix`: **Optional**. Add prefix to the original vid. When `function` is specified also, `prefix` is applied to the original vid before `function`. - -##### `schema.vertex.tags` - -**Optional**. Because a vertex can have several tags, different tags are described in the `schema.vertex.tags` parameter. - -Each tag contains the following two properties: - -* `name`: The tag name. -* `prop`: A property of the tag. Each property contains the following two fields: - * `name`: **Required**. The property name, must be the same with the tag property in Nebula Graph. - * `type`: **Optional**. The property type, currently `bool`, `int`, `float`, `double`, `string`, `time`, `timestamp`, `date`, `datetime`, `geography`, `geography(point)`, `geography(linestring)` and `geography(polygon)` are supported. - * `index`: **Optional**. The column number in the CSV file. - * `nullable`: **Optional**. Whether this prop property can be `NULL`, optional values is `true` or `false`, default `false`. - * `nullValue`: **Optional**. Ignored when `nullable` is `false`. The property is set to `NULL` when the value is equal to `nullValue`, default `""`. - * `alternativeIndices`: **Optional**. Ignored when `nullable` is `false`. The property is fetched from csv according to the indices in order until not equal to `nullValue`. - * `defaultValue`: **Optional**. Ignored when `nullable` is `false`. The property default value, when all the values obtained by `index` and `alternativeIndices` are `nullValue`. - -> **NOTE**: The properties in the preceding `prop` parameter must be sorted in the **same** way as in the CSV data file. - -##### `schema.edge` - -**Required**. Describes the schema information for edges. - -```yaml -schema: - type: edge - edge: - name: choose - srcVID: - index: 0 - function: hash - dstVID: - index: 1 - function: hash - rank: - index: 2 - props: - - name: grade - type: int - index: 3 -``` - -The edge parameter contains the following fields: - -* `name`: **Required**. The name of the edge type. -* `srcVID`: **Optional**. The source vertex information for the edge. The `index` and `function` included here are the same as that of in the `vertex.vid` parameter. -* `dstVID`: **Optional**. The destination vertex information for the edge. 
The `index` and `function` included here are the same as that of in the `vertex.vid` parameter. -* `rank`: **Optional**. Specifies the `rank` value for the edge. The `index` indicates the column number in the CSV file. -* `props`: **Required**. The same as the `props` in the vertex. The properties in the `prop` parameter must be sorted in the **same** way as in the CSV data file. - -See the [Configuration Reference](docs/configuration-reference.md) for details on the configurations. - -## About the CSV header - -Usually, you can add some descriptions in the first row of the CSV file to specify the type for each column. - -### Data without header - -If the `csv.withHeader` is set to `false`, the CSV file only contains the data (no descriptions in the first row). Example for vertices and edges are as follows: - -#### Vertex example - -Take tag `course` for example: - -```csv -101,Math,3,No5 -102,English,6,No11 -``` - -The first column is the vertex ID, the following three columns are the properties, corresponding to the course.name, course.credits and building.name in the configuration file. (See `vertex.tags.props`). - -#### Edge example - -Take edge type `choose` for example: - -```csv -200,101,5 -200,102,3 -``` - -The first two columns are the source VID and destination VID. The third column corresponds to the choose.likeness property. If an edge contains the rank value, put it in the third column. Then put the edge properties in order. - -### Data with header - -If the `csv.withHeader` is set to `true`, the first row of the CSV file is the header information. - -The format for each column is `.:`: - -* `` is the name for the vertex or edge. -* `` is the property name. -* `` is the property type. It can be `bool`, `int`, `float`, `double`, `string`, `time`, `timestamp`, `date`, `datetime`, `geography`, `geography(point)`, `geography(linestring)` and `geography(polygon)`. The default type is `string`. - -In the above `` field, the following keywords contain special semantics: - -* `:VID` is the vertex ID. -* `:SRC_VID` is the source vertex VID. -* `:DST_VID` is the destination vertex VID. -* `:RANK` is the rank of the edge. -* `:IGNORE` indicates the column is ignored. -* `:LABEL` indicates the column is marked as inserted/deleted `+/-`. - -> **NOTE**: If the CSV file contains the header, the importer parses the schema of each row according to the header and ignores the `props` in YAML. - -#### Example of vertex CSV file with header - -Take vertex course as example: - -```csv -:LABEL,:VID,course.name,building.name:string,:IGNORE,course.credits:int -+,"hash(""Math"")",Math,No5,1,3 -+,"hash(""English"")",English,"No11 B\",2,6 -``` - -##### LABEL (optional) - -```csv -:LABEL, -+, --, -``` - -Indicates the column is the insertion (+) or deletion (-) operation. - -##### :VID (required) - -```csv -:VID -123, -"hash(""Math"")", -"hash(""English"")" -``` - -In the `:VID` column, in addition to the common integer values (such as 123), you can also use the two built-in function `hash` to automatically generate the VID for the vertices (for example, hash("Math")). - -> **NOTE**: The double quotes (") are escaped in the CSV file. For example, `hash("Math")` must be written as `"hash(""Math"")"`. - -##### Other Properties - -```csv -course.name,:IGNORE,course.credits:int -Math,1,3 -English,2,6 -``` - -`:IGNORE` is to specify the column that you want to ignore when importing data. All columns except the `:LABEL` column can be sorted in any order. 
-
-See the [Configuration Reference](docs/configuration-reference.md) for details on the configurations.
-
-## About the CSV header
-
-Usually, you can add some descriptions in the first row of the CSV file to specify the type of each column.
-
-### Data without header
-
-If `csv.withHeader` is set to `false`, the CSV file contains only the data (no descriptions in the first row). Examples for vertices and edges are as follows:
-
-#### Vertex example
-
-Take the tag `course` for example:
-
-```csv
-101,Math,3,No5
-102,English,6,No11
-```
-
-The first column is the vertex ID. The following three columns are the properties, corresponding to course.name, course.credits, and building.name in the configuration file (see `vertex.tags.props`).
-
-#### Edge example
-
-Take the edge type `choose` for example:
-
-```csv
-200,101,5
-200,102,3
-```
-
-The first two columns are the source VID and the destination VID. The third column corresponds to the choose.likeness property. If an edge has a rank value, put it in the third column and then list the edge properties in order.
-
-### Data with header
-
-If `csv.withHeader` is set to `true`, the first row of the CSV file is the header.
-
-The format of each column is `<tag/edge_name>.<prop_name>:<prop_type>`:
-
-* `<tag/edge_name>` is the name of the vertex tag or edge type.
-* `<prop_name>` is the property name.
-* `<prop_type>` is the property type. It can be `bool`, `int`, `float`, `double`, `string`, `time`, `timestamp`, `date`, `datetime`, `geography`, `geography(point)`, `geography(linestring)` or `geography(polygon)`. The default type is `string`.
-
-In the above `<prop_name>` field, the following keywords have special semantics:
-
-* `:VID` is the vertex ID.
-* `:SRC_VID` is the source vertex VID.
-* `:DST_VID` is the destination vertex VID.
-* `:RANK` is the rank of the edge.
-* `:IGNORE` indicates that the column is ignored.
-* `:LABEL` indicates that the column marks the row as inserted (`+`) or deleted (`-`).
-
-> **NOTE**: If the CSV file contains a header, the importer parses the schema of each row according to the header and ignores the `props` in the YAML file.
-
-#### Example of vertex CSV file with header
-
-Take the vertex tag course as an example:
-
-```csv
-:LABEL,:VID,course.name,building.name:string,:IGNORE,course.credits:int
-+,"hash(""Math"")",Math,No5,1,3
-+,"hash(""English"")",English,"No11 B\",2,6
-```
-
-##### :LABEL (optional)
-
-```csv
-:LABEL,
-+,
--,
-```
-
-Indicates whether the row is an insertion (+) or a deletion (-).
-
-##### :VID (required)
-
-```csv
-:VID
-123,
-"hash(""Math"")",
-"hash(""English"")"
-```
-
-In the `:VID` column, in addition to common integer values (such as 123), you can also use the built-in function `hash` to automatically generate the VID for the vertices (for example, `hash("Math")`).
-
-> **NOTE**: The double quotes (") must be escaped in the CSV file. For example, `hash("Math")` must be written as `"hash(""Math"")"`.
-
-##### Other properties
-
-```csv
-course.name,:IGNORE,course.credits:int
-Math,1,3
-English,2,6
-```
-
-`:IGNORE` specifies a column that you want to skip when importing data. All columns except the `:LABEL` column can appear in any order. Thus, for a large CSV file, you can flexibly select the columns you need by setting the header.
-
-> **NOTE**: Because a VERTEX can contain multiple TAGs, you must include the tag name when specifying the header. For example, it must be `course.credits`, not the abbreviated `credits`.
-
-#### Example of edge CSV file with header
-
-Take the edge type `follow` for example:
-
-```csv
-:DST_VID,follow.likeness:double,:SRC_VID,:RANK
-201,92.5,200,0
-200,85.6,201,1
-```
-
-In the preceding example, the source vertex of the edge is `:SRC_VID` (in column 3), the destination vertex is `:DST_VID` (in column 1), the property on the edge is `follow.likeness:double` (in column 2), and the ranking field is `:RANK` (in column 4; the default value is 0 if it is not specified).
-
-#### :LABEL (optional)
-
-* `+` means inserting
-* `-` means deleting
-
-As with vertices, you can specify a label column in the header of the edge CSV file.
+TODO Docs
diff --git a/README_zh-CN.md b/README_zh-CN.md
deleted file mode 100644
index 76890917..00000000
--- a/README_zh-CN.md
+++ /dev/null
@@ -1,382 +0,0 @@
-
-Nebula Importer
-
-EN
-
- -[![test](https://github.com/vesoft-inc/nebula-importer/workflows/test/badge.svg)](https://github.com/vesoft-inc/nebula-importer/actions?workflow=test) - - diff --git a/ci/bootstrap.sh b/ci/bootstrap.sh deleted file mode 100755 index 1d839930..00000000 --- a/ci/bootstrap.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/sh - -set -e - -addr=$1 -port=$2 - -export GOPATH=/usr/local/nebula/ -export GO111MODULE=on - -# build git -apk add git - -# build nebula-console -mkdir -p nebulaconsolebuild -cd nebulaconsolebuild - wget "https://github.com/vesoft-inc/nebula-console/archive/master.zip" -O nebula-console.zip - unzip ./nebula-console.zip -d ./ - cd nebula-console-master - go build -o ../../nebula-console - cd .. -cd .. -rm -rf nebulaconsolebuild - -cd /usr/local/nebula/importer/cmd -go build -o ../../nebula-importer -cd /usr/local/nebula - -until echo "quit" | /usr/local/nebula/nebula-console -u root -p password --addr=$addr --port=$port &> /dev/null; do - echo "nebula graph is unavailable - sleeping" - sleep 2 -done - -echo "nebula graph is up - executing command" -for i in `seq 1 30`;do - echo "Adding hosts..." - var=`/usr/local/nebula/nebula-console -addr graphd1 -port 9669 -u root -p nebula -e 'ADD HOSTS "storaged":9779'`; - if [[ $$? == 0 ]];then - echo "Add hosts succeed" - break; - fi; - sleep 2; - echo "retry to add hosts."; -done - -./nebula-importer --config ./importer/examples/v1/example.yaml -./nebula-importer --config ./importer/examples/v2/example.yaml diff --git a/cmd/importer.go b/cmd/importer.go deleted file mode 100644 index 83a336f7..00000000 --- a/cmd/importer.go +++ /dev/null @@ -1,84 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "log" - "os" - "time" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/cmd" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" - "github.com/vesoft-inc/nebula-importer/v3/pkg/version" - "github.com/vesoft-inc/nebula-importer/v3/pkg/web" -) - -var configuration = flag.String("config", "", "Specify importer configure file path") -var echoVersion = flag.Bool("version", false, "echo build version") -var port = flag.Int("port", -1, "HTTP server port") -var callback = flag.String("callback", "", "HTTP server callback address") - -func main() { - errCode := 0 - - flag.Parse() - runnerLogger := logger.NewRunnerLogger("") - if *echoVersion { - fmt.Printf("%s \n", version.GoVersion) - fmt.Printf("Git Hash: %s \n", version.GitHash) - fmt.Printf("Tag: %s \n", version.Tag) - return - } - defer func() { - // Just for filebeat log fetcher to differentiate following logs from others - time.Sleep(1 * time.Second) - log.Println("--- END OF NEBULA IMPORTER ---") - os.Exit(errCode) - }() - - log.Println("--- START OF NEBULA IMPORTER ---") - if port != nil && *port > 0 && callback != nil && *callback != "" { - // Start http server - svr := &web.WebServer{ - Port: *port, - Callback: *callback, - RunnerLogger: runnerLogger, - } - - if err := svr.Start(); err != nil { - panic(err) - } - } else { - if *configuration == "" { - log.Fatal("please configure yaml file") - } - - conf, err := config.Parse(*configuration, runnerLogger) - if err != nil { - e := err.(errors.ImporterError) - log.Println(e.ErrMsg.Error()) - errCode = e.ErrCode - return - } - - runner := &cmd.Runner{} - - { - now := time.Now() - defer func() { - time.Sleep(500 * time.Millisecond) - if runner.Error() != nil { - e := runner.Error().(errors.ImporterError) - errCode = e.ErrCode - 
log.Println(e.ErrMsg.Error()) - } else { - log.Printf("Finish import data, consume time: %.2fs", time.Since(now).Seconds()) - } - }() - - runner.Run(conf) - } - } -} diff --git a/cmd/nebula-importer/main.go b/cmd/nebula-importer/main.go new file mode 100644 index 00000000..ce745c40 --- /dev/null +++ b/cmd/nebula-importer/main.go @@ -0,0 +1,13 @@ +package main + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/cmd" + "github.com/vesoft-inc/nebula-importer/v4/pkg/cmd/util" +) + +func main() { + command := cmd.NewDefaultImporterCommand() + if err := util.Run(command); err != nil { + util.CheckErr(err) + } +} diff --git a/docker-compose.yaml b/docker-compose.yaml deleted file mode 100644 index a3a0dc05..00000000 --- a/docker-compose.yaml +++ /dev/null @@ -1,139 +0,0 @@ -version: '3.4' -services: - metad: - image: vesoft/nebula-metad:nightly - environment: - USER: root - TZ: UTC - command: - - --meta_server_addrs=metad:9559 - - --local_ip=metad - - --ws_ip=metad - - --port=9559 - - --ws_http_port=19559 - healthcheck: - test: ["CMD", "curl", "-f", "http://metad:19559/status"] - interval: 15s - timeout: 10s - retries: 3 - start_period: 20s - ports: - - 9559 - - 19559 - - 19560 - networks: - nebula-net: - restart: on-failure - cap_add: - - SYS_PTRACE - - graphd1: - image: vesoft/nebula-graphd:nightly - environment: - USER: root - TZ: UTC - command: - - --meta_server_addrs=metad:9559 - - --local_ip=graphd1 - - --ws_ip=graphd1 - - --port=9669 - - --ws_http_port=19669 - - --local_config=false - depends_on: - - metad - - storaged - healthcheck: - test: ["CMD", "curl", "-f", "http://graphd1:19669/status"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 20s - ports: - - 9669 - - 19669 - - 19670 - networks: - nebula-net: - restart: on-failure - cap_add: - - SYS_PTRACE - - graphd2: - image: vesoft/nebula-graphd:nightly - environment: - USER: root - TZ: UTC - command: - - --meta_server_addrs=metad:9559 - - --local_ip=graphd1 - - --ws_ip=graphd2 - - --port=9669 - - --ws_http_port=19669 - - --enable_authorize=true - - --local_config=false - depends_on: - - metad - - storaged - healthcheck: - test: ["CMD", "curl", "-f", "http://graphd2:19669/status"] - interval: 30s - timeout: 10s - retries: 3 - start_period: 20s - ports: - - 9669 - - 19669 - - 19670 - networks: - nebula-net: - restart: on-failure - cap_add: - - SYS_PTRACE - - importer: - image: golang:1.13.2-alpine - volumes: - - .:/usr/local/nebula/importer:rw - working_dir: /usr/local/nebula - entrypoint: - - ./importer/ci/bootstrap.sh - - "graphd1" - - "9669" - depends_on: - - graphd1 - - graphd2 - networks: - nebula-net: - - storaged: - image: vesoft/nebula-storaged:nightly - environment: - USER: root - TZ: UTC - command: - - --meta_server_addrs=metad:9559 - - --local_ip=storaged - - --ws_ip=storaged - - --port=9779 - - --ws_http_port=19779 - - --local_config=false - depends_on: - - metad - healthcheck: - test: ["CMD", "curl", "-f", "http://storaged:19779/status"] - interval: 15s - timeout: 10s - retries: 3 - start_period: 20s - ports: - - 9779 - - 19779 - - 19780 - networks: - nebula-net: - restart: on-failure - cap_add: - - SYS_PTRACE - -networks: - nebula-net: diff --git a/docs/configuration-reference.md b/docs/configuration-reference.md deleted file mode 100644 index c617b9c7..00000000 --- a/docs/configuration-reference.md +++ /dev/null @@ -1,56 +0,0 @@ -# Nebula Importer Configuration Description - -| options | description | default | -| :-- | :-- | :-- | -| version | Configuration file version | v1 | -| description | 
Description of this configuration file | "" |
-| removeTempFiles | Whether to remove generated temporary data and log files | false |
-| clientSettings | Graph client settings | - |
-| clientSettings.retry | Number of times graph clients retry failed nGQL statements | 1 |
-| clientSettings.concurrency | Number of graph clients | 4 |
-| clientSettings.channelBufferSize | Buffer size of client channels | 128 |
-| clientSettings.space | Space name of all data to be inserted | "" |
-| clientSettings.connection | Connection options of graph client | - |
-| clientSettings.connection.user | Username | user |
-| clientSettings.connection.password | Password | password |
-| clientSettings.connection.address | Address of graph client | 127.0.0.1:9669 |
-| clientSettings.postStart.commands | Commands to run after connecting to nebula | "" |
-| clientSettings.postStart.afterPeriod | The wait time between running the post-start commands and inserting data | 0s |
-| clientSettings.preStop.commands | Commands to run before disconnecting from nebula | "" |
-| logPath | Path of log file | "" |
-| files | File list to be imported | - |
-| files[0].path | File path | "" |
-| files[0].failDataPath | Failed data file path | "" |
-| files[0].batchSize | Size of each batch when constructing insert statements | 128 |
-| files[0].limit | Maximum number of rows to read | NULL |
-| files[0].inOrder | Whether to insert rows in order | false |
-| files[0].type | File type | csv |
-| files[0].csv | CSV file options | - |
-| files[0].csv.withHeader | Whether the csv file has a header | false |
-| files[0].csv.withLabel | Whether the csv file has a `+/-` label marking each row as **inserted/deleted** | false |
-| files[0].csv.delimiter | The delimiter separating columns in the csv file | "," |
-| files[0].schema | Schema definition for this file data | - |
-| files[0].schema.type | Schema type: vertex or edge | vertex |
-| files[0].schema.edge | Edge options | - |
-| files[0].schema.edge.srcVID.index | Column index of source vertex id of edge | 0 |
-| files[0].schema.edge.srcVID.function | The generation function of edge source vertex id | "" |
-| files[0].schema.edge.srcVID.type | Type of source vertex id of edge | 0 |
-| files[0].schema.edge.dstVID.index | Column index of destination vertex id of edge | 1 |
-| files[0].schema.edge.dstVID.function | The generation function of edge destination vertex id | "" |
-| files[0].schema.edge.dstVID.type | Type of destination vertex id of edge | 1 |
-| files[0].schema.edge.rank.index | Column index of the edge rank | 2 |
-| files[0].schema.edge.name | Edge name in the space above | "" |
-| files[0].schema.edge.props | Properties of the edge | - |
-| files[0].schema.edge.props[0].name | Property name | "" |
-| files[0].schema.edge.props[0].type | Property type | "" |
-| files[0].schema.edge.props[0].index | Property index | |
-| files[0].schema.vertex | Vertex options | - |
-| files[0].schema.vertex.vid.index | Column index of vertex vid | 0 |
-| files[0].schema.vertex.vid.function | The generation function of vertex vid | "" |
-| files[0].schema.vertex.vid.type | The type of vertex vid | "string" |
-| files[0].schema.vertex.tags | Vertex tags options | - |
-| files[0].schema.vertex.tags[0].name | Vertex tag name | "" |
-| files[0].schema.vertex.tags[0].props | Vertex tag's properties | - |
-| files[0].schema.vertex.tags[0].props[0].name | Vertex tag's property name | "" |
-| files[0].schema.vertex.tags[0].props[0].type | Vertex tag's property type | "" |
-| files[0].schema.vertex.tags[0].props[0].index | Vertex tag's property index | |
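-
-A minimal configuration sketch tying these options together (values are illustrative, mirroring the repository's own example configs, not a definitive template):
-
-```yaml
-version: v2
-description: minimal example
-clientSettings:
-  concurrency: 4
-  space: test
-  connection:
-    user: user
-    password: password
-    address: 127.0.0.1:9669
-files:
-  - path: ./course.csv
-    failDataPath: ./err/course
-    batchSize: 128
-    type: csv
-    csv:
-      withHeader: false
-      delimiter: ","
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 0
-        tags:
-          - name: course
-            props:
-              - name: name
-                type: string
-                index: 1
-```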
diff --git a/docs/golang-install-en.md b/docs/golang-install-en.md deleted file mode 100644 index cf754086..00000000 --- a/docs/golang-install-en.md +++ /dev/null @@ -1,39 +0,0 @@ -# Build Go environment - -## Download the installation package - -- https://studygolang.com/dl - -## Unzip the package and move it to /usr/local/go - -```bash -$ mv golang-1.13 /usr/local/go -``` - -## Configure environment variables - -```bash -export GOROOT=/usr/local/go -export GOPATH=$HOME/go -export GO111MODULE=on -export GOPROXY=https://goproxy.cn -export PATH=$PATH:$GOROOT/bin:$GOPATH/bin -``` - -Add the preceding configurations to the `~/.bashrc` file. Run the `source ~/.bashrc` command to take effect. - -## Verify your installation - -```bash -$ go version -``` - -## Compile nebula-importer - -Go to the nebula-importer project directory and run the following commands: - -```bash -$ cd nebula-importer/cmd -$ go build -mod vendor -o nebula-importer -$ ./nebula-importer --help -``` diff --git a/docs/golang-install.md b/docs/golang-install.md deleted file mode 100644 index 4147de3f..00000000 --- a/docs/golang-install.md +++ /dev/null @@ -1,39 +0,0 @@ -# Golang 环境搭建 - -## 下载安装包 - -- https://studygolang.com/dl - -## 解压并移动到 /usr/local/go - -```bash -$ mv golang-1.13 /usr/local/go -``` - -## 配置环境变量 - -```bash -export GOROOT=/usr/local/go -export GOPATH=$HOME/go -export GO111MODULE=on -export GOPROXY=https://goproxy.cn -export PATH=$PATH:$GOROOT/bin:$GOPATH/bin -``` - -将上述配置加到 `~/.bashrc` 文件中,并通过 `source ~/.bashrc` 使其生效。 - -## 检验是否安装成功 - -```bash -$ go version -``` - -## 编译 nebula-importer - -首先进入 nebula-importer 的项目目录。然后执行如下的命令: - -```bash -$ cd nebula-importer/cmd -$ go build -mod vendor -o nebula-importer -$ ./nebula-importer --help -``` diff --git a/examples/basic/README.md b/examples/basic/README.md new file mode 100644 index 00000000..e69de29b diff --git a/examples/basic/basic.int.v3.yaml b/examples/basic/basic.int.v3.yaml new file mode 100644 index 00000000..5d5209dd --- /dev/null +++ b/examples/basic/basic.int.v3.yaml @@ -0,0 +1,150 @@ +client: + version: v3 + address: "127.0.0.1:9669" + user: root + password: nebula + concurrencyPerAddress: 1 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: basic_int_examples + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + DROP SPACE IF EXISTS basic_int_examples; + CREATE SPACE IF NOT EXISTS basic_int_examples(partition_num=5, replica_factor=1, vid_type=int); + USE basic_int_examples; + CREATE TAG Person(firstName STRING, lastName STRING, gender STRING, birthday DATE, creationDate DATETIME, locationIP STRING, browserUsed STRING); + CREATE EDGE KNOWS(creationDate DATETIME); + wait: 10s + +sources: + - path: ./person.csv + csv: + delimiter: "|" + tags: + - name: Person + id: + type: "INT" + index: 0 + ignoreExistedIndex: true + props: + - name: "firstName" + type: "STRING" + index: 1 + - name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + nullable: true + defaultValue: male + - name: "birthday" + type: "DATE" + index: 4 + nullable: true + nullValue: _NULL_ + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + nullable: true + alternativeIndices: + - 6 + - name: Person + id: + type: "INT" + index: 0 + function: hash + ignoreExistedIndex: true + props: + - name: "firstName" + type: "STRING" + index: 1 + - 
name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + nullable: true + defaultValue: male + - name: "birthday" + type: "DATE" + index: 4 + nullable: true + nullValue: _NULL_ + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + nullable: true + alternativeIndices: + - 6 + - path: ./knows.csv + edges: + - name: KNOWS # person_knows_person + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + ignoreExistedIndex: true + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + nullable: true + nullValue: _NULL_ + defaultValue: 0000-00-00T00:00:00 + - name: KNOWS # person_knows_person + src: + id: + type: "INT" + index: 0 + function: hash + dst: + id: + type: "INT" + index: 1 + function: hash + rank: + index: 0 + ignoreExistedIndex: false + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + nullable: true + nullValue: _NULL_ + defaultValue: 0000-00-00T00:00:00 + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/examples/basic/basic.string.v3.yaml b/examples/basic/basic.string.v3.yaml new file mode 100644 index 00000000..7fdf05a9 --- /dev/null +++ b/examples/basic/basic.string.v3.yaml @@ -0,0 +1,99 @@ +client: + version: v3 + address: "127.0.0.1:9669" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: basic_string_examples + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + DROP SPACE IF EXISTS basic_string_examples; + CREATE SPACE IF NOT EXISTS basic_string_examples(partition_num=5, replica_factor=1, vid_type=FIXED_STRING(64)); + USE basic_string_examples; + CREATE TAG Person(firstName STRING, lastName STRING, gender STRING, birthday DATE, creationDate DATETIME, locationIP STRING, browserUsed STRING); + CREATE EDGE KNOWS(creationDate DATETIME); + wait: 10s + +sources: + - path: ./person.csv + csv: + delimiter: "|" + tags: + - name: Person + id: + type: "STRING" + concatItems: + - person_ + - 0 + - _id + props: + - name: "firstName" + type: "STRING" + index: 1 + - name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + nullable: true + defaultValue: female + - name: "birthday" + type: "DATE" + index: 4 + nullable: true + nullValue: _NULL_ + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + - path: ./knows.csv + batch: 256 + edges: + - name: KNOWS # person_knows_person + src: + id: + type: "STRING" + concatItems: + - person_ + - 0 + - _id + dst: + id: + type: "STRING" + concatItems: + - person_ + - 1 + - _id + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + nullable: true + nullValue: _NULL_ + defaultValue: 0000-00-00T00:00:00 + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/examples/basic/knows.csv b/examples/basic/knows.csv new file mode 100644 index 00000000..704df2e8 --- /dev/null +++ b/examples/basic/knows.csv @@ -0,0 +1,9 @@ +933,2199023256684,2010-04-22T12:30:57 +933,10995116278700,2010-11-15T07:23:49 +933,21990232556027,2011-12-15T02:34:43 +1129,21990232556027,2010-04-13T16:44:22 
+1129,10995116278700,2011-01-02T22:39:28 +1129,4398046512167,2011-09-17T21:40:17 +1129,6597069767117,2012-01-19T19:21:54 +21990232556027,6597069767117,2012-04-15T04:24:28 +6597069767117,4398046512167,2012-05-29T18:00:10 diff --git a/examples/basic/person.csv b/examples/basic/person.csv new file mode 100644 index 00000000..cf5aa9c7 --- /dev/null +++ b/examples/basic/person.csv @@ -0,0 +1,13 @@ +933|Mahinda|Perera|male|1989-12-03|2010-02-14T15:32:10|119.235.7.103|Firefox +1129|Carmen|Lepland|female|1984-02-18|2010-01-28T06:39:58|195.20.151.175|Internet Explorer +2199023256684|A.|Rao|female|1985-08-02|2010-04-23T22:52:26|49.202.188.25|Firefox +4398046512167|Gustavo|Arbelaez|male|1986-11-02|2010-06-16T20:53:47|190.96.189.165|Chrome +6597069767117|Eli|Peretz|female|1989-01-18|2010-07-23T12:55:52|41.92.31.35|Internet Explorer +10995116278700|Joseph|Anderson|female|1986-01-07|2010-11-06T05:06:52|24.49.203.161|Firefox +17592186045684|Michael|Li|male|1983-04-20|2011-06-05T01:41:59|1.50.202.121|Firefox +21990232556027|Yacine|Abdelli|male|1988-07-26|2011-10-30T03:42:11|82.101.132.47|Firefox +21990232556585|Faisal|Malik|male|1981-02-02|2011-10-23T21:12:23|202.163.114.175|Chrome +24189255812290|Manuel|Alvarez||1985-12-27|2011-11-13T07:28:40|168.165.167.25|Firefox +26388279066636|Jose|Alonso|female|_NULL_|2012-02-15T13:41:01|196.1.135.241|Internet Explorer +28587302322727|Steve|Moore|male|1981-08-28|2012-03-23T00:44:07|18.250.105.113| +30786325578904|Giuseppe|Donati||_NULL_|2012-05-24T12:36:37|46.254.44.230| diff --git a/examples/hdfs/hdfs.v3.yaml b/examples/hdfs/hdfs.v3.yaml new file mode 100644 index 00000000..5f17b72d --- /dev/null +++ b/examples/hdfs/hdfs.v3.yaml @@ -0,0 +1,50 @@ +client: + version: v3 + address: "127.0.0.1:9669" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: hdfs_examples + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + CREATE SPACE IF NOT EXISTS `hdfs_examples`(PARTITION_NUM = 20, REPLICA_FACTOR = 1, vid_type = INT); + USE `hdfs_examples`; + CREATE TAG IF NOT EXISTS `Tag`(`date` STRING); + wait: 10s + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + fields: + - key: app + value: nebula-importer + +sources: + - hdfs: + address: "127.0.0.1:8020" + user: "hdfs" + path: "/events/20190918.export.csv" + csv: + delimiter: "\t" + tags: + - name: Tag + id: + type: "INT" + index: 0 + props: + - name: "date" + type: "STRING" + index: 1 diff --git a/examples/sf/.gitignore b/examples/ldbc-snb/.gitignore similarity index 60% rename from examples/sf/.gitignore rename to examples/ldbc-snb/.gitignore index 0cff17ed..b60ddffc 100644 --- a/examples/sf/.gitignore +++ b/examples/ldbc-snb/.gitignore @@ -2,4 +2,4 @@ !.gitignore !README.md -!sf.yaml +!ldbc-snb.*.yaml diff --git a/examples/sf/README.md b/examples/ldbc-snb/README.md similarity index 100% rename from examples/sf/README.md rename to examples/ldbc-snb/README.md diff --git a/examples/ldbc-snb/ldbc-snb.v3.yaml b/examples/ldbc-snb/ldbc-snb.v3.yaml new file mode 100644 index 00000000..1c1aa1a8 --- /dev/null +++ b/examples/ldbc-snb/ldbc-snb.v3.yaml @@ -0,0 +1,578 @@ +client: + version: v3 + address: "127.0.0.1:9669" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: ldbc_snb_examples + batch: 128 + readerConcurrency: 50 + 
importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + CREATE SPACE IF NOT EXISTS `ldbc_snb_examples`(PARTITION_NUM = 120, REPLICA_FACTOR = 1, vid_type = INT); + USE `ldbc_snb_examples`; + CREATE TAG IF NOT EXISTS `Comment`(`creationDate` DATETIME,`locationIP` STRING,`browserUsed` STRING,`content` STRING,`length` INT); + CREATE TAG IF NOT EXISTS `Forum`(`title` STRING,`creationDate` DATETIME); + CREATE TAG IF NOT EXISTS `Tag`(`name` STRING,`url` STRING); + CREATE TAG IF NOT EXISTS `Organisation`(`type` STRING,`name` STRING,`url` STRING); + CREATE TAG IF NOT EXISTS `Post`(`imageFile` STRING,`creationDate` DATETIME,`locationIP` STRING,`browserUsed` STRING,`language` STRING,`content` STRING,`length` INT); + CREATE TAG IF NOT EXISTS `Person`(`firstName` STRING,`lastName` STRING,`gender` STRING,`birthday` DATE,`creationDate` DATETIME,`locationIP` STRING,`browserUsed` STRING); + CREATE TAG IF NOT EXISTS `Place`(`name` STRING,`url` STRING,`type` STRING); + CREATE TAG IF NOT EXISTS `TagClass`(`name` STRING,`url` STRING); + CREATE EDGE IF NOT EXISTS `HAS_MEMBER`(`joinDate` DATETIME); + CREATE EDGE IF NOT EXISTS `HAS_TAG`(); + CREATE EDGE IF NOT EXISTS `STUDY_AT`(`classYear` INT); + CREATE EDGE IF NOT EXISTS `IS_PART_OF`(); + CREATE EDGE IF NOT EXISTS `IS_LOCATED_IN`(); + CREATE EDGE IF NOT EXISTS `WORK_AT`(`workFrom` INT); + CREATE EDGE IF NOT EXISTS `CONTAINER_OF`(); + CREATE EDGE IF NOT EXISTS `IS_SUBCLASS_OF`(); + CREATE EDGE IF NOT EXISTS `HAS_MODERATOR`(); + CREATE EDGE IF NOT EXISTS `HAS_TYPE`(); + CREATE EDGE IF NOT EXISTS `KNOWS`(`creationDate` DATETIME); + CREATE EDGE IF NOT EXISTS `HAS_INTEREST`(); + CREATE EDGE IF NOT EXISTS `COMMENT_HAS_CREATOR`(`creationDate` DATETIME,`locationIP` STRING,`browserUsed` STRING,`content` STRING,`length` INT64); + CREATE EDGE IF NOT EXISTS `REPLY_OF_COMMENT`(); + CREATE EDGE IF NOT EXISTS `LIKES_COMMENT`(`creationDate` DATETIME); + CREATE EDGE IF NOT EXISTS `POST_HAS_CREATOR`(`imageFile` STRING,`creationDate` DATETIME,`locationIP` STRING,`browserUsed` STRING,`language` STRING,`content` STRING,`length` INT64); + CREATE EDGE IF NOT EXISTS `REPLY_OF_POST`(); + CREATE EDGE IF NOT EXISTS `LIKES_POST`(`creationDate` DATETIME); + wait: 10s + +sources: + - path: ./social_network/static/place.csv + csv: + delimiter: "|" + tags: + - name: Place + id: + type: "INT" + index: 0 + props: + - name: "name" + type: "STRING" + index: 1 + - name: "url" + type: "STRING" + index: 2 + - name: "type" + type: "STRING" + index: 3 + + - path: ./social_network/static/organisation.csv + csv: + delimiter: "|" + tags: + - name: Organisation + id: + type: "INT" + index: 0 + props: + - name: "type" + type: "STRING" + index: 1 + - name: "name" + type: "STRING" + index: 2 + - name: "url" + type: "STRING" + index: 3 + + - path: ./social_network/dynamic/person.csv + csv: + delimiter: "|" + tags: + - name: Person + id: + type: "INT" + index: 0 + props: + - name: "firstName" + type: "STRING" + index: 1 + - name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + - name: "birthday" + type: "DATE" + index: 4 + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + + - path: ./social_network/dynamic/forum.csv + csv: + delimiter: "|" + tags: + - name: Forum + id: + type: "INT" + index: 0 + props: + - name: "title" + type: "STRING" + index: 1 + - name: "creationDate" + type: "DATETIME" + index: 2 + + - path: 
./social_network/dynamic/post.csv + csv: + delimiter: "|" + tags: + - name: Post + id: + type: "INT" + index: 0 + props: + - name: "imageFile" + type: "STRING" + index: 1 + - name: "creationDate" + type: "DATETIME" + index: 2 + - name: "locationIP" + type: "STRING" + index: 3 + - name: "browserUsed" + type: "STRING" + index: 4 + - name: "language" + type: "STRING" + index: 5 + - name: "content" + type: "STRING" + index: 6 + - name: "length" + type: "INT" + index: 7 + + - path: ./social_network/dynamic/comment.csv + csv: + delimiter: "|" + tags: + - name: Comment + id: + type: "INT" + index: 0 + props: + - name: "creationDate" + type: "DATETIME" + index: 1 + - name: "locationIP" + type: "STRING" + index: 2 + - name: "browserUsed" + type: "STRING" + index: 3 + - name: "content" + type: "STRING" + index: 4 + - name: "length" + type: "INT" + index: 5 + + - path: ./social_network/static/tag.csv + csv: + delimiter: "|" + tags: + - name: Tag + id: + type: "INT" + index: 0 + props: + - name: "name" + type: "STRING" + index: 1 + - name: "url" + type: "STRING" + index: 2 + + - path: ./social_network/static/tagclass.csv + csv: + delimiter: "|" + tags: + - name: TagClass + id: + type: "INT" + index: 0 + props: + - name: "name" + type: "STRING" + index: 1 + - name: "url" + type: "STRING" + index: 2 + + - path: ./social_network/static/place_isPartOf_place.csv + csv: + delimiter: "|" + edges: + - name: IS_PART_OF # PLACE_IS_PART_OF_PLACE + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/static/organisation_isLocatedIn_place.csv + csv: + delimiter: "|" + edges: + - name: IS_LOCATED_IN # ORGANISATION_IS_LOCATED_IN_PLACE + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/person_isLocatedIn_place.csv + csv: + delimiter: "|" + edges: + - name: IS_LOCATED_IN # PERSON_IS_LOCATED_IN_PLACE + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/person_studyAt_organisation.csv + csv: + delimiter: "|" + edges: + - name: STUDY_AT # PERSON_STUDY_AT_ORGANISATION + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "classYear" + type: "INT" + index: 2 + + - path: ./social_network/dynamic/person_workAt_organisation.csv + csv: + delimiter: "|" + edges: + - name: WORK_AT # PERSON_WORK_AT_ORGANISATION + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "workFrom" + type: "INT" + index: 2 + + - path: ./social_network/dynamic/person_knows_person.csv + csv: + delimiter: "|" + edges: + - name: KNOWS # PERSON_KNOWS_PERSON + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + + - path: ./social_network/dynamic/person_likes_post.csv + csv: + delimiter: "|" + edges: + - name: LIKES_POST # PERSON_LIKES_POST + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + + - path: ./social_network/dynamic/person_likes_comment.csv + csv: + delimiter: "|" + edges: + - name: LIKES_COMMENT # PERSON_LIKES_COMMENT + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + + - path: ./social_network/dynamic/person_hasInterest_tag.csv + csv: + delimiter: "|" + edges: + - name: HAS_INTEREST # PERSON_HAS_INTEREST_TAG + src: + id: + type: "INT" 
+ index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/forum_hasMember_person.csv + csv: + delimiter: "|" + edges: + - name: HAS_MEMBER # FORUM_HAS_MEMBER_PERSON + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "joinDate" + type: "DATETIME" + index: 2 + + - path: ./social_network/dynamic/forum_hasModerator_person.csv + csv: + delimiter: "|" + edges: + - name: HAS_MODERATOR # FORUM_HAS_MODERATOR_PERSON + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/forum_containerOf_post.csv + csv: + delimiter: "|" + edges: + - name: CONTAINER_OF # FORUM_CONTAINER_OF_POST + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/forum_hasTag_tag.csv + csv: + delimiter: "|" + edges: + - name: HAS_TAG # FORUM_HAS_TAG_TAG + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/post_isLocatedIn_place.csv + csv: + delimiter: "|" + edges: + - name: IS_LOCATED_IN # POST_IS_LOCATED_IN_PLACE + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/post_hasCreator_person.csv + csv: + delimiter: "|" + edges: + - name: POST_HAS_CREATOR # POST_HAS_CREATOR_PERSON + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/post_hasTag_tag.csv + csv: + delimiter: "|" + edges: + - name: HAS_TAG # POST_HAS_TAG_TAG + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/comment_isLocatedIn_place.csv + csv: + delimiter: "|" + edges: + - name: IS_LOCATED_IN # COMMENT_IS_LOCATED_IN_PLACE + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/comment_hasCreator_person.csv + csv: + delimiter: "|" + edges: + - name: COMMENT_HAS_CREATOR # COMMENT_HAS_CREATOR_PERSON + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/comment_replyOf_post.csv + csv: + delimiter: "|" + edges: + - name: REPLY_OF_POST # COMMENT_REPLY_OF_POST + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/comment_replyOf_comment.csv + csv: + delimiter: "|" + edges: + - name: REPLY_OF_COMMENT # COMMENT_REPLY_OF_COMMENT + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/dynamic/comment_hasTag_tag.csv + csv: + delimiter: "|" + edges: + - name: HAS_TAG # COMMENT_HAS_TAG_TAG + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/static/tag_hasType_tagclass.csv + csv: + delimiter: "|" + edges: + - name: HAS_TYPE # TAG_HAS_TYPE_TAGCLASS + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + + - path: ./social_network/static/tagclass_isSubclassOf_tagclass.csv + csv: + delimiter: "|" + edges: + - name: IS_SUBCLASS_OF # TAGCLASS_IS_SUBCLASS_OF_TAGCLASS + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/examples/s3/s3.v3.yaml b/examples/s3/s3.v3.yaml new file mode 100644 index 00000000..8b27dd76 --- /dev/null +++ b/examples/s3/s3.v3.yaml @@ -0,0 +1,70 @@ +client: + version: v3 + address: "127.0.0.1:9669" 
+ user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: s3_examples + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + CREATE SPACE IF NOT EXISTS `s3_examples`(PARTITION_NUM = 20, REPLICA_FACTOR = 1, vid_type = INT); + USE `s3_examples`; + CREATE TAG IF NOT EXISTS `Tag`(`date` STRING); + wait: 10s + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + fields: + - key: app + value: nebula-importer + +sources: + - s3: # AWS s3 + region: us-east-1 + bucket: gdelt-open-data + key: events/20190918.export.csv + # accessKey: "" + # secretKey: "" + csv: + delimiter: "\t" + tags: + - name: Tag + id: + type: "INT" + index: 0 + props: + - name: "date" + type: "STRING" + index: 1 + - s3: # MinIO + endpoint: http://127.0.0.1:9000/ + region: us-east-1 + bucket: gdelt-open-data + key: events/20190918.export.csv + accessKey: "accessKey" + secretKey: "secretKey" + csv: + delimiter: "\t" + tags: + - name: Tag + id: + type: "INT" + index: 0 + props: + - name: "date" + type: "STRING" + index: 1 diff --git a/examples/sf/sf.yaml b/examples/sf/sf.yaml deleted file mode 100644 index 6ee1ab1c..00000000 --- a/examples/sf/sf.yaml +++ /dev/null @@ -1,873 +0,0 @@ -version: v2 -description: ldbc -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 48 # number of graph clients - channelBufferSize: 1280 - space: sf_test - connection: - user: root - password: nebula - address: 192.168.8.142:9669 - postStart: - commands: | - CREATE SPACE IF NOT EXISTS sf_test(PARTITION_NUM = 120, REPLICA_FACTOR = 1, vid_type = fixed_string(32)); - USE sf_test; - CREATE TAG IF NOT EXISTS `Comment`(`creationDate` datetime,`locationIP` string,`browserUsed` string,`content` string,`length` int); - CREATE TAG IF NOT EXISTS `Forum`(`title` string,`creationDate` datetime); - CREATE TAG IF NOT EXISTS `Tag`(`name` string,`url` string); - CREATE TAG IF NOT EXISTS `Organisation`(`type` string,`name` string,`url` string); - CREATE TAG IF NOT EXISTS `Post`(`imageFile` string,`creationDate` datetime,`locationIP` string,`browserUsed` string,`language` string,`content` string,`length` int); - CREATE TAG IF NOT EXISTS `Person`(`firstName` string,`lastName` string,`gender` string,`birthday` string,`creationDate` datetime,`locationIP` string,`browserUsed` string); - CREATE TAG IF NOT EXISTS `Place`(`name` string,`url` string,`type` string); - CREATE TAG IF NOT EXISTS `Tagclass`(`name` string,`url` string); - CREATE EDGE IF NOT EXISTS `HAS_MEMBER`(`joinDate` datetime); - CREATE EDGE IF NOT EXISTS `HAS_TAG`(); - CREATE EDGE IF NOT EXISTS `STUDY_AT`(`classYear` int); - CREATE EDGE IF NOT EXISTS `IS_PART_OF`(); - CREATE EDGE IF NOT EXISTS `IS_LOCATED_IN`(); - CREATE EDGE IF NOT EXISTS `WORK_AT`(`workFrom` int); - CREATE EDGE IF NOT EXISTS `CONTAINER_OF`(); - CREATE EDGE IF NOT EXISTS `IS_SUBCLASS_OF`(); - CREATE EDGE IF NOT EXISTS `HAS_MODERATOR`(); - CREATE EDGE IF NOT EXISTS `HAS_TYPE`(); - CREATE EDGE IF NOT EXISTS `KNOWS`(`creationDate` datetime); - CREATE EDGE IF NOT EXISTS `HAS_INTEREST`(); - CREATE EDGE IF NOT EXISTS `COMMENT_HAS_CREATOR`(`creationDate` datetime,`locationIP` string,`browserUsed` string,`content` string,`length` int64); - CREATE EDGE IF NOT EXISTS `REPLY_OF_COMMENT`(); - CREATE EDGE IF NOT EXISTS `LIKES_COMMENT`(`creationDate` datetime); - CREATE EDGE IF NOT EXISTS `POST_HAS_CREATOR`(`imageFile` string,`creationDate` 
datetime,`locationIP` string,`browserUsed` string,`language` string,`content` string,`length` int64); - CREATE EDGE IF NOT EXISTS `REPLY_OF_POST`(); - CREATE EDGE IF NOT EXISTS `LIKES_POST`(`creationDate` datetime); - - afterPeriod: 20s -logPath: ./err/test.log -files: - - path: ./social_network/dynamic/person_final.csv - failDataPath: ./err/data/Person - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: p- - tags: - - name: Person - props: - - name: firstName - type: string - index: 1 - - name: lastName - type: string - index: 2 - - name: gender - type: string - index: 3 - - name: birthday - type: string - index: 4 - - name: creationDate - type: datetime - index: 5 - - name: locationIP - type: string - index: 6 - - name: browserUsed - type: string - index: 7 - - - path: ./social_network/dynamic/forum.csv - failDataPath: ./err/data/Forum - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: f- - tags: - - name: Forum - props: - - name: title - type: string - index: 1 - - name: creationDate - type: datetime - index: 2 - - - path: ./social_network/dynamic/comment.csv - failDataPath: ./err/data/Comment - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: c- - tags: - - name: Comment - props: - - name: creationDate - type: datetime - index: 1 - - name: locationIP - type: string - index: 2 - - name: browserUsed - type: string - index: 3 - - name: content - type: string - index: 4 - - name: length - type: int - index: 5 - - - path: ./social_network/dynamic/post.csv - failDataPath: ./err/data/Post - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: s- - tags: - - name: Post - props: - - name: imageFile - type: string - index: 1 - - name: creationDate - type: datetime - index: 2 - - name: locationIP - type: string - index: 3 - - name: browserUsed - type: string - index: 4 - - name: language - type: string - index: 5 - - name: content - type: string - index: 6 - - name: length - type: int - index: 7 - - - path: ./social_network/static/tagclass.csv - failDataPath: ./err/data/Tagclass - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: g- - tags: - - name: Tagclass - props: - - name: name - type: string - index: 1 - - name: url - type: string - index: 2 - - - path: ./social_network/static/organisation.csv - failDataPath: ./err/data/Organisation - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: o- - tags: - - name: Organisation - props: - - name: type - type: string - index: 1 - - name: name - type: string - index: 2 - - name: url - type: string - index: 3 - - - path: ./social_network/static/place.csv - failDataPath: ./err/data/Place - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: l- - tags: - - name: Place - props: - - name: name - type: string - index: 1 - - name: url - type: string - 
index: 2 - - name: type - type: string - index: 3 - - - path: ./social_network/static/tag.csv - failDataPath: ./err/data/Tag - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: vertex - vertex: - vid: - index: 0 - type: string - prefix: t- - tags: - - name: Tag - props: - - name: name - type: string - index: 1 - - name: url - type: string - index: 2 - - - path: ./social_network/dynamic/forum_hasModerator_person.csv - failDataPath: ./err/data/HAS_MODERATOR - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_MODERATOR - withRanking: false - srcVID: - index: 0 - type: string - prefix: f- - dstVID: - index: 1 - type: string - prefix: p- - props: - - - path: ./social_network/dynamic/person_likes_comment.csv - failDataPath: ./err/data/LIKES_COMMENT - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: LIKES_COMMENT - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: c- - props: - - name: creationDate - type: datetime - index: 2 - - - path: ./social_network/dynamic/forum_hasMember_person.csv - failDataPath: ./err/data/HAS_MEMBER - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_MEMBER - withRanking: false - srcVID: - index: 0 - type: string - prefix: f- - dstVID: - index: 1 - type: string - prefix: p- - props: - - name: joinDate - type: datetime - index: 2 - - - path: ./social_network/dynamic/person_likes_post.csv - failDataPath: ./err/data/LIKES_POST - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: LIKES_POST - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: s- - props: - - name: creationDate - type: datetime - index: 2 - - - path: ./social_network/dynamic/post_hasTag_tag.csv - failDataPath: ./err/data/HAS_TAG - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_TAG - withRanking: false - srcVID: - index: 0 - type: string - prefix: s- - dstVID: - index: 1 - type: string - prefix: t- - props: - - - path: ./social_network/dynamic/comment_hasTag_tag.csv - failDataPath: ./err/data/HAS_TAG - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_TAG - withRanking: false - srcVID: - index: 0 - type: string - prefix: c- - dstVID: - index: 1 - type: string - prefix: t- - props: - - - path: ./social_network/dynamic/forum_containerOf_post.csv - failDataPath: ./err/data/CONTAINER_OF - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: CONTAINER_OF - withRanking: false - srcVID: - index: 0 - type: string - prefix: f- - dstVID: - index: 1 - type: string - prefix: s- - props: - - - path: ./social_network/dynamic/person_knows_person.csv - failDataPath: ./err/data/KNOWS - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: KNOWS - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: p- - props: - - name: creationDate - type: datetime - 
index: 2 - - - path: ./social_network/dynamic/person_hasInterest_tag.csv - failDataPath: ./err/data/HAS_INTEREST - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_INTEREST - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: t- - props: - - - path: ./social_network/dynamic/person_workAt_organisation.csv - failDataPath: ./err/data/WORK_AT - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: WORK_AT - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: o- - props: - - name: workFrom - type: int - index: 2 - - - path: ./social_network/dynamic/person_isLocatedIn_place.csv - failDataPath: ./err/data/IS_LOCATED_IN - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: IS_LOCATED_IN - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: l- - props: - - - path: ./social_network/dynamic/forum_hasTag_tag.csv - failDataPath: ./err/data/HAS_TAG - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_TAG - withRanking: false - srcVID: - index: 0 - type: string - prefix: f- - dstVID: - index: 1 - type: string - prefix: t- - props: - - - path: ./social_network/dynamic/comment_replyOf_post.csv - failDataPath: ./err/data/REPLY_OF_POST - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: REPLY_OF_POST - withRanking: false - srcVID: - index: 0 - type: string - prefix: c- - dstVID: - index: 1 - type: string - prefix: s- - props: - - - path: ./social_network/dynamic/post_isLocatedIn_place.csv - failDataPath: ./err/data/IS_LOCATED_IN - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: IS_LOCATED_IN - withRanking: false - srcVID: - index: 0 - type: string - prefix: s- - dstVID: - index: 1 - type: string - prefix: l- - props: - - - path: ./social_network/dynamic/comment_replyOf_comment.csv - failDataPath: ./err/data/REPLY_OF_COMMENT - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: REPLY_OF_COMMENT - withRanking: false - srcVID: - index: 0 - type: string - prefix: c- - dstVID: - index: 1 - type: string - prefix: c- - props: - - - path: ./social_network/dynamic/comment_isLocatedIn_place.csv - failDataPath: ./err/data/IS_LOCATED_IN - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: IS_LOCATED_IN - withRanking: false - srcVID: - index: 0 - type: string - prefix: c- - dstVID: - index: 1 - type: string - prefix: l- - props: - - - path: ./social_network/dynamic/person_studyAt_organisation.csv - failDataPath: ./err/data/STUDY_AT - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: STUDY_AT - withRanking: false - srcVID: - index: 0 - type: string - prefix: p- - dstVID: - index: 1 - type: string - prefix: o- - props: - - name: classYear - type: int - index: 2 - - - path: ./social_network/dynamic/comment_hasCreator_person_new.csv - 
failDataPath: ./err/data/COMMENT_HAS_CREATOR - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: COMMENT_HAS_CREATOR - withRanking: false - srcVID: - index: 0 - type: string - prefix: c- - dstVID: - index: 1 - type: string - prefix: p- - props: - - name: creationDate - type: datetime - index: 3 - - name: locationIP - type: string - index: 4 - - name: browserUsed - type: string - index: 5 - - name: content - type: string - index: 6 - - name: length - type: int - index: 7 - - - path: ./social_network/dynamic/post_hasCreator_person_new.csv - failDataPath: ./err/data/POST_HAS_CREATOR - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: POST_HAS_CREATOR - withRanking: false - srcVID: - index: 0 - type: string - prefix: s- - dstVID: - index: 1 - type: string - prefix: p- - props: - - name: imageFile - type: string - index: 3 - - name: creationDate - type: datetime - index: 4 - - name: locationIP - type: string - index: 5 - - name: browserUsed - type: string - index: 6 - - name: language - type: string - index: 7 - - name: content - type: string - index: 8 - - name: length - type: int - index: 9 - - - path: ./social_network/static/tagclass_isSubclassOf_tagclass.csv - failDataPath: ./err/data/IS_SUBCLASS_OF - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: IS_SUBCLASS_OF - withRanking: false - srcVID: - index: 0 - type: string - prefix: g- - dstVID: - index: 1 - type: string - prefix: g- - props: - - - path: ./social_network/static/place_isPartOf_place.csv - failDataPath: ./err/data/IS_PART_OF - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: IS_PART_OF - withRanking: false - srcVID: - index: 0 - type: string - prefix: l- - dstVID: - index: 1 - type: string - prefix: l- - props: - - - path: ./social_network/static/tag_hasType_tagclass.csv - failDataPath: ./err/data/HAS_TYPE - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: HAS_TYPE - withRanking: false - srcVID: - index: 0 - type: string - prefix: t- - dstVID: - index: 1 - type: string - prefix: g- - props: - - - path: ./social_network/static/organisation_isLocatedIn_place.csv - failDataPath: ./err/data/IS_LOCATED_IN - batchSize: 100 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: IS_LOCATED_IN - withRanking: false - srcVID: - index: 0 - type: string - prefix: o- - dstVID: - index: 1 - type: string - prefix: l- - props: diff --git a/examples/v1/choose-hex.csv b/examples/v1/choose-hex.csv deleted file mode 100644 index bf4117fa..00000000 --- a/examples/v1/choose-hex.csv +++ /dev/null @@ -1,6 +0,0 @@ -0x200,07101,5 -0X200,05102,3 -0x201,00102,3 -0X202,02102,3 -0x2af,0,3 --0X202,-02102,6 diff --git a/examples/v1/choose.csv b/examples/v1/choose.csv deleted file mode 100644 index 6da9f888..00000000 --- a/examples/v1/choose.csv +++ /dev/null @@ -1,7 +0,0 @@ -200,101,5 -200,102,3 -201,102,3 -202,102,3 -"hash(""ID1"")","hash(""ID2"")",4 -0,6,3 -00,047,4 diff --git a/examples/v1/course-with-header.csv b/examples/v1/course-with-header.csv deleted file mode 100644 index bb88f475..00000000 --- a/examples/v1/course-with-header.csv +++ /dev/null @@ -1,5 +0,0 @@ 
-:LABEL,:VID(int),course.name,building.name:string,:IGNORE,course.credits:int -+,"hash(""English"")",English,"No11 -B\",2,6 -+,"hash(""Math"")",Math,No5,1,3 --,"hash(""Math"")",Math,No5,1,3 diff --git a/examples/v1/course.csv b/examples/v1/course.csv deleted file mode 100644 index e9dd4fc8..00000000 --- a/examples/v1/course.csv +++ /dev/null @@ -1,2 +0,0 @@ -101,Math,3,No5 -102,English,6,No11 diff --git a/examples/v1/example.yaml b/examples/v1/example.yaml deleted file mode 100644 index 6e80bb85..00000000 --- a/examples/v1/example.yaml +++ /dev/null @@ -1,498 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test_v1 - connection: - user: root - password: nebula - address: graphd1:9669,graphd2:9669 - postStart: - commands: | - UPDATE CONFIGS storage:wal_ttl=3600; - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = true }; - DROP SPACE IF EXISTS importer_test_v1; - CREATE SPACE IF NOT EXISTS importer_test_v1(partition_num=5, replica_factor=1, vid_type=int);USE importer_test_v1; - CREATE TAG course(name string, credits int); - CREATE TAG building(name string); - CREATE TAG student(name string, age int, gender string); - CREATE EDGE follow(likeness double); - CREATE EDGE choose(grade int); - CREATE TAG course_no_props(); - CREATE TAG building_no_props(); - CREATE EDGE follow_no_props(); - afterPeriod: 10s - preStop: - commands: | - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = false }; - UPDATE CONFIGS storage:wal_ttl=86400; -logPath: ./err/test.log -files: - - path: ./choose.csv - batchSize: 2 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: choose - withRanking: false - srcVID: - type: int - dstVID: - type: int - props: - - name: grade - type: int - - - path: ./course.csv - failDataPath: ./err/course - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - vid: - type: int - tags: - - name: course - props: - - name: name - type: string - - name: credits - type: int - - name: building - props: - - name: name - type: string - - - path: ./course.csv - failDataPath: ./err/course-concat - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - vid: - type: int - concatItems: # "c1{index0}c2{index1}2" - - "c1" - - 0 - - c2 - - 1 - - "2" - function: hash - tags: - - name: course - props: - - name: name - type: string - - name: credits - type: int - - name: building - props: - - name: name - type: string - - - path: ./course-with-header.csv - failDataPath: ./err/course-with-header - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: true - withLabel: true - schema: - type: vertex - - - path: ./follow-with-label.csv - failDataPath: ./err/follow-with-label - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: true - schema: - type: edge - edge: - name: follow - withRanking: true - srcVID: - index: 0 - type: int - dstVID: - index: 2 - type: int - rank: - index: 3 - props: - - name: likeness - type: double - index: 1 - - - path: ./follow-with-label-and-str-vid.csv - failDataPath: ./err/follow-with-label-and-str-vid - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: true - schema: - type: edge - edge: - name: follow - withRanking: true - 
-        srcVID:
-          index: 0
-          type: int
-          function: hash
-        dstVID:
-          index: 2
-          type: int
-          function: hash
-        rank:
-          index: 3
-        props:
-          - name: likeness
-            type: double
-            index: 1
-
-  - path: ./follow.csv
-    failDataPath: ./err/follow
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: edge
-      edge:
-        name: follow
-        withRanking: true
-        srcVID:
-          type: int
-        dstVID:
-          type: int
-        props:
-          - name: likeness
-            type: double
-
-  - path: ./follow-with-header.csv
-    failDataPath: ./err/follow-with-header
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: true
-      withLabel: false
-    schema:
-      type: edge
-      edge:
-        name: follow
-        withRanking: true
-
-  - path: ./student.csv
-    failDataPath: ./err/student
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          type: int
-        tags:
-          - name: student
-            props:
-              - name: name
-                type: string
-              - name: age
-                type: int
-              - name: gender
-                type: string
-
-  - path: ./student.csv
-    failDataPath: ./err/student_index
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 1
-          function: hash
-        tags:
-          - name: student
-            props:
-              - name: age
-                type: int
-                index: 2
-              - name: name
-                type: string
-                index: 1
-              - name: gender
-                type: string
-
-  - path: ./student-with-label-and-str-vid.csv
-    failDataPath: ./err/student_label_str_vid
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: true
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 1
-          function: hash
-          type: int
-        tags:
-          - name: student
-            props:
-              - name: age
-                type: int
-                index: 2
-              - name: name
-                type: string
-                index: 1
-              - name: gender
-                type: string
-
-  - path: ./follow.csv
-    failDataPath: ./err/follow_index
-    batchSize: 2
-    limit: 3
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: edge
-      edge:
-        name: follow
-        srcVID:
-          index: 0
-          # function: hash
-          type: int
-        dstVID:
-          index: 1
-          # function: hash
-          type: int
-        rank:
-          index: 2
-        props:
-          - name: likeness
-            type: double
-            index: 3
-
-  - path: ./follow-delimiter.csv
-    failDataPath: ./err/follow-delimiter
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: true
-      withLabel: false
-      delimiter: "|"
-    schema:
-      type: edge
-      edge:
-        name: follow
-        withRanking: true
-
-  - path: https://raw.githubusercontent.com/vesoft-inc/nebula-importer/master/examples/v2/follow.csv
-    failDataPath: ./err/follow_http
-    batchSize: 2
-    limit: 3
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: edge
-      edge:
-        name: follow
-        srcVID:
-          index: 0
-          function: hash
-          type: int
-        dstVID:
-          index: 1
-          function: hash
-          type: int
-        rank:
-          index: 2
-        props:
-          - name: likeness
-            type: double
-            index: 3
-
-  - path: ./course.csv
-    failDataPath: ./err/course-empty-props
-    batchSize: 2
-    inOrder: true
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-      delimiter: ","
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 0
-          type: int
-        tags:
-          - name: course_no_props
-
-  - path: ./course.csv
-    failDataPath: ./err/course-multi-empty-props
-    batchSize: 2
-    inOrder: true
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-      delimiter: ","
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 0
-          type: int
-        tags:
-          - name: course_no_props
-          - name: building_no_props
-
-  - path: ./course.csv
-    failDataPath: ./err/course-mix-empty-props
-    batchSize: 2
-    inOrder: true
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-      delimiter: ","
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 0
-          type: int
-        tags:
-          - name: course_no_props
-          - name: building
-            props:
-              - name: name
-                type: string
-                index: 3
-
-  - path: ./course.csv
-    failDataPath: ./err/course-mix-empty-props-2
-    batchSize: 2
-    inOrder: true
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-      delimiter: ","
-    schema:
-      type: vertex
-      vertex:
-        vid:
-          index: 0
-          type: int
-        tags:
-          - name: building
-            props:
-              - name: name
-                type: string
-                index: 3
-          - name: course_no_props
-
-
-  - path: ./follow.csv
-    failDataPath: ./err/follow-empty-props
-    batchSize: 2
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-      delimiter: ","
-    schema:
-      type: edge
-      edge:
-        name: follow_no_props
-        withRanking: false
-        dstVID:
-          index: 1
-          type: int
-        srcVID:
-          index: 0
-          type: int
-
-  - path: ./choose-hex.csv
-    batchSize: 2
-    inOrder: false
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: edge
-      edge:
-        name: choose
-        withRanking: false
-        srcVID:
-          index: 0
-          type: int
-        dstVID:
-          index: 1
-          type: int
-        props:
-          - name: grade
-            type: int
-            int: 2
-
-  - path: ./choose-hex.csv
-    batchSize: 2
-    inOrder: false
-    type: csv
-    csv:
-      withHeader: false
-      withLabel: false
-    schema:
-      type: edge
-      edge:
-        name: choose
-        withRanking: false
-        srcVID:
-          index: 0
-          type: int
-        dstVID:
-          index: 1
-          type: int
-        props:
-          - name: grade
-            type: int
-            int: 2
diff --git a/examples/v1/follow-delimiter.csv b/examples/v1/follow-delimiter.csv
deleted file mode 100644
index 4031c942..00000000
--- a/examples/v1/follow-delimiter.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-:DST_VID(int)|follow.likeness:double|:SRC_VID(int)|:RANK
-201|92.5|200|0
-200|85.6|201|1
-202|93.2|201|2
diff --git a/examples/v1/follow-with-header.csv b/examples/v1/follow-with-header.csv
deleted file mode 100644
index 2487899b..00000000
--- a/examples/v1/follow-with-header.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-:DST_VID(int),follow.likeness:double,:SRC_VID(int),:RANK
-201,92.5,200,0
-200,85.6,201,1
-202,93.2,201,2
diff --git a/examples/v1/follow-with-label-and-str-vid.csv b/examples/v1/follow-with-label-and-str-vid.csv
deleted file mode 100644
index 59e90550..00000000
--- a/examples/v1/follow-with-label-and-str-vid.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-+,一201,92.5,200,0
-+,一200,85.6,201,1
-+,一202,93.2,201,2
--,一201,92.5,200,0
--,一200,85.6,201,1
diff --git a/examples/v1/follow-with-label.csv b/examples/v1/follow-with-label.csv
deleted file mode 100644
index cff9c48c..00000000
--- a/examples/v1/follow-with-label.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-+,201,92.5,200,0
-+,200,85.6,201,1
-+,202,93.2,201,2
--,201,92.5,200,0
--,200,85.6,201,1
diff --git a/examples/v1/follow.csv b/examples/v1/follow.csv
deleted file mode 100644
index d72c2ba5..00000000
--- a/examples/v1/follow.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-200,201,0,92.5
-201,200,1,85.6
-201,202,2,93.2
-201,202,1,96.2
diff --git a/examples/v1/student-with-label-and-str-vid.csv b/examples/v1/student-with-label-and-str-vid.csv
deleted file mode 100644
index 606837d4..00000000
--- a/examples/v1/student-with-label-and-str-vid.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-+,200,Monica,16,female
-+,201,Mike,18,male
-+,202,Jane,17,female
--,201,Mike,18,male
diff --git a/examples/v1/student.csv b/examples/v1/student.csv
deleted file mode 100644
index c7639d4a..00000000
--- a/examples/v1/student.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-200,Monica,16,female
-201,Mike,18,male
-202,Jane,17,female
diff --git a/examples/v2/basic_type_test.csv b/examples/v2/basic_type_test.csv
deleted file mode 100644
index 008124a9..00000000
---
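The deleted label CSVs above carry an optional leading column when withLabel is enabled: "+" rows are data to apply as inserts, "-" rows as deletions. A minimal Go sketch of that dispatch (the helper name and string results are illustrative, not the importer's actual code):

package main

import "fmt"

// opForLabel maps the leading withLabel column of a CSV record
// to the kind of statement the row should produce (sketch only).
func opForLabel(label string) (string, error) {
	switch label {
	case "+":
		return "INSERT", nil
	case "-":
		return "DELETE", nil
	}
	return "", fmt.Errorf("unknown label %q", label)
}

func main() {
	record := []string{"+", "201", "92.5", "200", "0"}
	op, err := opForLabel(record[0])
	fmt.Println(op, record[1:], err) // INSERT [201 92.5 200 0] <nil>
}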
a/examples/v2/basic_type_test.csv +++ /dev/null @@ -1,12 +0,0 @@ -b1,true,-1,-2.2,-3.0,str -b2,false,0,0,0.0,0 -b3,true,1,2.0,3.3,abc -b4,false,3,2.0,3.3,0a bd -b5,true,-3,2,3,abcd efg -bnull1,,,,, -bnull2,,,,,__NULL__ -bnull3,,-4,4,4.4, -bnull4,,-4,,4.4, -bnull5,,,4,, -bnull6,,,4,4.4, -bnull7,,,,4.4, diff --git a/examples/v2/choose.csv b/examples/v2/choose.csv deleted file mode 100644 index 800951e9..00000000 --- a/examples/v2/choose.csv +++ /dev/null @@ -1,4 +0,0 @@ -x200,x101,5 -x200,y102,3 -y201,y102,3 -z202,y102,3 diff --git a/examples/v2/course-lazy-quotes.csv b/examples/v2/course-lazy-quotes.csv deleted file mode 100644 index 53938948..00000000 --- a/examples/v2/course-lazy-quotes.csv +++ /dev/null @@ -1,7 +0,0 @@ -00testLazyQuotes0,a "word",4,No1 -00testLazyQuotes1,a"1"2",4,No1 -00testLazyQuotes2,a",4,No1 -00testLazyQuotes3,a"b,4,No1 -00testLazyQuotes4,a"b,4,No1 -00testLazyQuotes5,a""b,4,No1 -00testLazyQuotes6,"a"b",4,No1 diff --git a/examples/v2/course-with-header.csv b/examples/v2/course-with-header.csv deleted file mode 100644 index a4d75cfe..00000000 --- a/examples/v2/course-with-header.csv +++ /dev/null @@ -1,5 +0,0 @@ -:LABEL,:VID(string),course.name,building.name:string,:IGNORE,course.credits:int -+,English,English,"No11 -B\",2,6 -+,Math,Math,No5,1,3 --,Math,Math,No5,1,3 diff --git a/examples/v2/course.csv b/examples/v2/course.csv deleted file mode 100644 index b0335b68..00000000 --- a/examples/v2/course.csv +++ /dev/null @@ -1,7 +0,0 @@ -x101,Math,3,No5 -y102,English,6,No11 -"z103",Chinese,1,No1 -0test,Test,2,No2 -00test,Test2,4,No3 -"000test",中国(  ),5,No10 -"0000test",中国( ),7,No10 diff --git a/examples/v2/data/course.csv b/examples/v2/data/course.csv deleted file mode 100644 index b0335b68..00000000 --- a/examples/v2/data/course.csv +++ /dev/null @@ -1,7 +0,0 @@ -x101,Math,3,No5 -y102,English,6,No11 -"z103",Chinese,1,No1 -0test,Test,2,No2 -00test,Test2,4,No3 -"000test",中国(  ),5,No10 -"0000test",中国( ),7,No10 diff --git a/examples/v2/date_test.csv b/examples/v2/date_test.csv deleted file mode 100644 index 18fec1c9..00000000 --- a/examples/v2/date_test.csv +++ /dev/null @@ -1,3 +0,0 @@ -d1,2020-01-01,18:28:23.284,2020-01-01T18:28:23.284,2020-01-01T18:28:23 -d2,2020-01-02,18:38:23.284,2020-01-11T19:28:23.284,1578770903 -dnull,,,, diff --git a/examples/v2/example.yaml b/examples/v2/example.yaml deleted file mode 100644 index c5e8a80c..00000000 --- a/examples/v2/example.yaml +++ /dev/null @@ -1,744 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test_v2 - connection: - user: root - password: nebula - address: graphd1:9669,graphd2:9669 - postStart: - commands: | - UPDATE CONFIGS storage:wal_ttl=3600; - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = true }; - DROP SPACE IF EXISTS importer_test_v2; - CREATE SPACE IF NOT EXISTS importer_test_v2(partition_num=5, replica_factor=1, vid_type=FIXED_STRING(32));USE importer_test_v2; - CREATE TAG course(name string, credits int); - CREATE TAG building(name string); - CREATE TAG student(name string, age int, gender string); - CREATE EDGE follow(likeness double); - CREATE EDGE choose(grade int); - CREATE TAG course_no_props(); - CREATE TAG building_no_props(); - CREATE EDGE follow_no_props(); - CREATE TAG basic_type_test(b bool NULL, i int NULL, f float NULL, d double NULL, s string NULL); - CREATE EDGE edge_basic_type_test(b bool NULL, i int NULL, f float NULL, d double NULL, 
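The deleted date_test.csv above feeds the date, time, datetime, and timestamp columns declared in this schema, including epoch seconds such as 1578770903. A hedged Go sketch of parsing those four value shapes (the layout list and helper are illustrative assumptions, not importer code):

package main

import (
	"fmt"
	"strconv"
	"time"
)

// parseTemporal tries the layouts seen in date_test.csv
// (2020-01-01, 18:28:23.284, 2020-01-01T18:28:23.284) and
// falls back to epoch seconds for timestamp values.
func parseTemporal(s string) (time.Time, error) {
	layouts := []string{
		"2006-01-02T15:04:05.000",
		"2006-01-02T15:04:05",
		"2006-01-02",
		"15:04:05.000",
	}
	for _, layout := range layouts {
		if t, err := time.Parse(layout, s); err == nil {
			return t, nil
		}
	}
	if secs, err := strconv.ParseInt(s, 10, 64); err == nil {
		return time.Unix(secs, 0).UTC(), nil
	}
	return time.Time{}, fmt.Errorf("unrecognized temporal value %q", s)
}

func main() {
	for _, v := range []string{"2020-01-01", "18:28:23.284", "2020-01-01T18:28:23.284", "1578770903"} {
		t, err := parseTemporal(v)
		fmt.Println(t, err)
	}
}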
s string NULL); - CREATE TAG date_test(c1 date NULL, c2 time NULL, c3 datetime NULL, c4 timestamp NULL); - CREATE EDGE edge_date_test(c1 date NULL, c2 time NULL, c3 datetime NULL, c4 timestamp NULL); - CREATE TAG geography_test(any_shape geography NULL, only_point geography(point) NULL, only_linestring geography(linestring) NULL, only_polygon geography(polygon) NULL); - CREATE EDGE edge_geography_test(any_shape geography NULL, only_point geography(point) NULL, only_linestring geography(linestring) NULL, only_polygon geography(polygon) NULL) - - afterPeriod: 10s - preStop: - commands: | - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = false }; - UPDATE CONFIGS storage:wal_ttl=86400; -logPath: ./err/test.log -files: - - path: ./choose.csv - batchSize: 2 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: choose - withRanking: false - props: - - name: grade - type: int - - - path: ./course.csv - failDataPath: ./err/course - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - tags: - - name: course - props: - - name: name - type: string - - name: credits - type: int - - name: building - props: - - name: name - type: string - - - path: ./course-lazy-quotes.csv - failDataPath: ./err/course-lazy-quotes - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - lazyQuotes: true - schema: - type: vertex - vertex: - tags: - - name: course - props: - - name: name - type: string - - name: credits - type: int - - name: building - props: - - name: name - type: string - - - path: ./course.csv - failDataPath: ./err/course-concat - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - vid: - type: string - concatItems: # "c1{index0}c2{index1}2" - - "c1" - - 0 - - c2 - - 1 - - "2" - tags: - - name: course - props: - - name: name - type: string - - name: credits - type: int - - name: building - props: - - name: name - type: string - - - path: ./course-with-header.csv - failDataPath: ./err/course-with-header - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: true - withLabel: true - schema: - type: vertex - - - path: ./follow-with-label.csv - failDataPath: ./err/follow-with-label - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: true - schema: - type: edge - edge: - name: follow - withRanking: true - srcVID: - index: 0 - dstVID: - index: 2 - rank: - index: 3 - props: - - name: likeness - type: double - index: 1 - - - path: ./follow-with-label-and-str-vid.csv - failDataPath: ./err/follow-with-label-and-str-vid - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: true - schema: - type: edge - edge: - name: follow - withRanking: true - srcVID: - index: 0 - # function: hash - dstVID: - index: 2 - # function: hash - rank: - index: 3 - props: - - name: likeness - type: double - index: 1 - - - path: ./follow.csv - failDataPath: ./err/follow - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: follow - withRanking: true - props: - - name: likeness - type: double - - - path: ./follow.csv - failDataPath: ./err/follow_test_prefix - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: follow - withRanking: true - srcVID: - prefix: student_ - dstVID: - prefix: 
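The course-concat entry above assembles a string VID from literal fragments and column indices, per its inline comment "c1{index0}c2{index1}2". A minimal sketch of that concatenation rule, assuming integer items select CSV columns and string items pass through verbatim:

package main

import "fmt"

// concatVID mirrors the concatItems idea: ints pick record
// columns by index, strings are inserted as-is.
func concatVID(items []any, record []string) string {
	var vid string
	for _, item := range items {
		switch v := item.(type) {
		case int:
			vid += record[v]
		case string:
			vid += v
		}
	}
	return vid
}

func main() {
	record := []string{"x101", "Math", "3", "No5"}
	items := []any{"c1", 0, "c2", 1, "2"}
	fmt.Println(concatVID(items, record)) // c1x101c2Math2
}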
student_ - props: - - name: likeness - type: double - - - path: ./glob-follow-*.csv - failDataPath: ./err/follow-glob - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: follow - withRanking: true - props: - - name: likeness - type: double - - - - path: ./follow-with-header.csv - failDataPath: ./err/follow-with-header - batchSize: 2 - type: csv - csv: - withHeader: true - withLabel: false - schema: - type: edge - edge: - name: follow - withRanking: true - - - path: ./student.csv - failDataPath: ./err/student - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - tags: - - name: student - props: - - name: name - type: string - - name: age - type: int - - name: gender - type: string - - - path: ./student.csv - failDataPath: ./err/student_test_prefix - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - vid: - prefix: student_ - tags: - - name: student - props: - - name: name - type: string - - name: age - type: int - - name: gender - type: string - - - path: ./student.csv - failDataPath: ./err/student_index - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - vid: - index: 1 - # function: hash - tags: - - name: student - props: - - name: age - type: int - index: 2 - - name: name - type: string - index: 1 - - name: gender - type: string - - - path: ./student-with-label-and-str-vid.csv - failDataPath: ./err/student_label_str_vid - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: true - schema: - type: vertex - vertex: - vid: - index: 1 - tags: - - name: student - props: - - name: age - type: int - index: 2 - - name: name - type: string - index: 1 - - name: gender - type: string - - - path: ./follow.csv - failDataPath: ./err/follow_index - batchSize: 2 - limit: 3 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: follow - srcVID: - index: 0 - # function: hash - dstVID: - index: 1 - rank: - index: 2 - props: - - name: likeness - type: double - index: 3 - - - path: ./follow-delimiter.csv - failDataPath: ./err/follow-delimiter - batchSize: 2 - type: csv - csv: - withHeader: true - withLabel: false - delimiter: "|" - schema: - type: edge - edge: - name: follow - withRanking: true - - - path: https://raw.githubusercontent.com/vesoft-inc/nebula-importer/master/examples/v2/follow.csv - failDataPath: ./err/follow_http - batchSize: 2 - limit: 3 - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: follow - srcVID: - index: 0 - # function: hash - dstVID: - index: 1 - rank: - index: 2 - props: - - name: likeness - type: double - index: 3 - - - path: ./course.csv - failDataPath: ./err/course-empty-props - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - vertex: - vid: - index: 0 - tags: - - name: course_no_props - - - path: ./course.csv - failDataPath: ./err/course-multi-empty-props - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - vertex: - vid: - index: 0 - tags: - - name: course_no_props - - name: building_no_props - - - path: ./course.csv - failDataPath: ./err/course-mix-empty-props - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - 
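The glob-follow entry above selects multiple source files with a shell-style pattern (./glob-follow-*.csv). Go's standard filepath.Glob performs the same expansion; how the importer itself resolves paths may differ, so this is a sketch:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Matches glob-follow-1.csv and glob-follow-2.csv when run in a
	// directory containing those files; the error only reports a
	// malformed pattern, not an empty match set.
	matches, err := filepath.Glob("./glob-follow-*.csv")
	if err != nil {
		fmt.Println("bad pattern:", err)
		return
	}
	for _, m := range matches {
		fmt.Println(m)
	}
}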
vertex: - vid: - index: 0 - tags: - - name: course_no_props - - name: building - props: - - name: name - type: string - index: 3 - - - path: ./course.csv - failDataPath: ./err/course-mix-empty-props-2 - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - vertex: - vid: - index: 0 - prefix: wxyz - tags: - - name: building - props: - - name: name - type: string - index: 3 - - name: course_no_props - - - - path: ./follow.csv - failDataPath: ./err/follow-empty-props - batchSize: 2 - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: edge - edge: - name: follow_no_props - withRanking: false - dstVID: - index: 1 - srcVID: - index: 0 - - - path: ./basic_type_test.csv - failDataPath: ./err/basic_type_test - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - vertex: - vid: - index: 0 - tags: - - name: basic_type_test - props: - - name: b - type: bool - index: 1 - nullable: true - - name: i - type: int - index: 2 - nullable: true - defaultValue: "0" - - name: f - type: float - index: 3 - nullable: true - alternativeIndices: - - 2 - - name: d - type: double - index: 4 - nullable: true - alternativeIndices: - - 3 - - 2 - defaultValue: "0" - - name: s - type: string - index: 5 - nullable: true - - - path: ./basic_type_test.csv - failDataPath: ./err/edge_basic_type_test - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: edge - edge: - name: edge_basic_type_test - srcVID: - index: 0 - dstVID: - index: 0 - withRanking: false - props: - - name: b - type: bool - index: 1 - nullable: true - - name: i - type: int - index: 2 - nullable: true - - name: f - type: float - index: 3 - nullable: true - alternativeIndices: - - 2 - defaultValue: "0" - - name: d - type: double - index: 4 - nullable: true - alternativeIndices: - - 3 - - 2 - - name: s - type: string - index: 5 - nullable: true - nullValue: "__NULL__" - - - path: ./date_test.csv - failDataPath: ./err/date_test - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - vertex: - vid: - index: 0 - tags: - - name: date_test - props: - - name: c1 - type: date - index: 1 - nullable: true - - name: c2 - type: time - index: 2 - nullable: true - - name: c3 - type: datetime - index: 3 - nullable: true - - name: c4 - type: timestamp - index: 4 - nullable: true - - - path: ./date_test.csv - failDataPath: ./err/edge_date_test - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: edge - edge: - name: edge_date_test - srcVID: - index: 0 - dstVID: - index: 0 - withRanking: false - props: - - name: c1 - type: date - index: 1 - nullable: true - - name: c2 - type: time - index: 2 - nullable: true - - name: c3 - type: datetime - index: 3 - nullable: true - - name: c4 - type: timestamp - index: 4 - nullable: true - - - path: ./geography_test.csv - failDataPath: ./err/geography_test - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: vertex - vertex: - vid: - index: 0 - tags: - - name: geography_test - props: - - name: any_shape - type: geography - index: 1 - nullable: true - - name: only_point - type: geography(point) - index: 2 - nullable: true - - name: only_linestring - type: 
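The basic_type_test entries above combine nullable, nullValue, alternativeIndices, and defaultValue. Read together they suggest a per-property fallback order: the primary index first, then each alternative index, then the default, with empty cells (or the configured nullValue) treated as NULL. A hedged sketch of that order (the struct and helper are illustrative, not the importer's implementation):

package main

import "fmt"

// propSpec mirrors the per-property YAML keys above; the struct
// itself is an illustrative assumption.
type propSpec struct {
	index              int
	alternativeIndices []int
	defaultValue       *string
	nullable           bool
	nullValue          string // cell text meaning NULL, e.g. "__NULL__"
}

// pickValue returns the cell to import, or nil to mean NULL.
func pickValue(p propSpec, record []string) *string {
	isNull := func(s string) bool {
		return s == "" || (p.nullValue != "" && s == p.nullValue)
	}
	for _, idx := range append([]int{p.index}, p.alternativeIndices...) {
		if idx < len(record) && !isNull(record[idx]) {
			v := record[idx]
			return &v
		}
	}
	if p.defaultValue != nil {
		return p.defaultValue
	}
	return nil // NULL when nullable; a row error otherwise in practice
}

func main() {
	def := "0"
	p := propSpec{index: 3, alternativeIndices: []int{2}, defaultValue: &def, nullable: true}
	// A row whose primary column is empty falls back to column 2.
	if v := pickValue(p, []string{"b9", "", "-4", "", "4.4", ""}); v != nil {
		fmt.Println(*v) // -4
	}
}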
geography(linestring) - index: 3 - nullable: true - - name: only_polygon - type: geography(polygon) - index: 4 - nullable: true - - - path: ./geography_test.csv - failDataPath: ./err/edge_geography_test - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - delimiter: "," - schema: - type: edge - edge: - name: edge_geography_test - srcVID: - index: 0 - dstVID: - index: 0 - withRanking: false - props: - - name: any_shape - type: geography - index: 1 - nullable: true - - name: only_point - type: geography(point) - index: 2 - nullable: true - - name: only_linestring - type: geography(linestring) - index: 3 - nullable: true - - name: only_polygon - type: geography(polygon) - index: 4 - nullable: true diff --git a/examples/v2/example_with_working_dir.yaml b/examples/v2/example_with_working_dir.yaml deleted file mode 100644 index 1208706d..00000000 --- a/examples/v2/example_with_working_dir.yaml +++ /dev/null @@ -1,44 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test_working_dir - connection: - user: root - password: nebula - address: graphd1:9669,graphd2:9669 - postStart: - commands: | - DROP SPACE IF EXISTS importer_test_working_dir; - CREATE SPACE IF NOT EXISTS importer_test_working_dir(partition_num=1, replica_factor=1, vid_type=FIXED_STRING(10)); - USE importer_test_working_dir; - CREATE TAG course(name string, credits int); - afterPeriod: 8s -workingDir: ./data/ -logPath: ./err/test.log -files: - - path: ./course.csv - failDataPath: ./err/course - batchSize: 2 - inOrder: true - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: vertex - vertex: - tags: - - name: course - props: - - name: name - type: string - - name: credits - type: int - - name: building - props: - - name: name - type: string diff --git a/examples/v2/follow-delimiter.csv b/examples/v2/follow-delimiter.csv deleted file mode 100644 index 9570ddb0..00000000 --- a/examples/v2/follow-delimiter.csv +++ /dev/null @@ -1,4 +0,0 @@ -:DST_VID(string)|follow.likeness:double|:SRC_VID(string)|:RANK -x201|92.5|y200|0 -y200|85.6|x201|1 -z202|93.2|x201|2 diff --git a/examples/v2/follow-with-header.csv b/examples/v2/follow-with-header.csv deleted file mode 100644 index 5ee62f16..00000000 --- a/examples/v2/follow-with-header.csv +++ /dev/null @@ -1,4 +0,0 @@ -:DST_VID(string),follow.likeness:double,:SRC_VID(string),:RANK -x201,92.5,y200,0 -y200,85.6,x201,1 -z202,93.2,x201,2 diff --git a/examples/v2/follow-with-label-and-str-vid.csv b/examples/v2/follow-with-label-and-str-vid.csv deleted file mode 100644 index e0e8ec01..00000000 --- a/examples/v2/follow-with-label-and-str-vid.csv +++ /dev/null @@ -1,5 +0,0 @@ -+,一y201,92.5,x200,0 -+,一x200,85.6,y201,1 -+,一202,93.2,y201,2 --,一y201,92.5,x200,0 --,一x200,85.6,y201,1 diff --git a/examples/v2/follow-with-label.csv b/examples/v2/follow-with-label.csv deleted file mode 100644 index ce3fd1ad..00000000 --- a/examples/v2/follow-with-label.csv +++ /dev/null @@ -1,5 +0,0 @@ -+,x201,92.5,y200,0 -+,y200,85.6,x201,1 -+,z202,93.2,x201,2 --,x201,92.5,y200,0 --,y200,85.6,x201,1 diff --git a/examples/v2/follow.csv b/examples/v2/follow.csv deleted file mode 100644 index f6a137ad..00000000 --- a/examples/v2/follow.csv +++ /dev/null @@ -1,4 +0,0 @@ -x200,y201,0,92.5 -y201,x200,1,85.6 -y201,z202,2,93.2 -y201,z202,1,96.2 diff --git a/examples/v2/geography_test.csv b/examples/v2/geography_test.csv deleted file mode 100644 index 
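The deleted example_with_working_dir.yaml above resolves relative source paths against workingDir: ./data/, which is why the repo also removes examples/v2/data/course.csv: its ./course.csv is read from data/course.csv. In Go that resolution is a filepath.Join (illustrative only):

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	workingDir := "./data/"
	// A relative path from the config is joined onto workingDir,
	// so ./course.csv becomes data/course.csv after cleaning.
	resolved := filepath.Join(workingDir, "./course.csv")
	fmt.Println(resolved) // data/course.csv
}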
b4deff31..00000000 --- a/examples/v2/geography_test.csv +++ /dev/null @@ -1,4 +0,0 @@ -g1,POINT (-82.3764154 42.6452196),"Point(3 8)","LINEstring (-108.7 35.0,-100.0 46.5,-90.7 34.9,-108.7 35.0)","POlygon ( (-100.1 41.4,-102.9 37.6,-96.8 37.5,-100.1 41.4))" -g2,"LineString(0 1, 1 2, 2 3)","point(4.6 5.7 )","LINESTRING(43.8 52.6, -78.99 84.323)","POLYGON ((-108.7 35.0,-100.0 46.5,-90.7 34.9,-108.7 35.0))" -g3,"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))", Point(0.0 0.0),"linestring(0 1, 179.99 89.99)","polygon((0 1, 2 4, 3 5, 4 9, 0 1))" -gnull,,,, diff --git a/examples/v2/glob-follow-1.csv b/examples/v2/glob-follow-1.csv deleted file mode 100644 index 06a37b4c..00000000 --- a/examples/v2/glob-follow-1.csv +++ /dev/null @@ -1,3 +0,0 @@ -x200,y201,0,92.5 -y201,x200,1,85.6 -y201,z202,2,93.2 diff --git a/examples/v2/glob-follow-2.csv b/examples/v2/glob-follow-2.csv deleted file mode 100644 index b91581c1..00000000 --- a/examples/v2/glob-follow-2.csv +++ /dev/null @@ -1 +0,0 @@ -y201,z202,1,96.2 diff --git a/examples/v2/student-with-label-and-str-vid.csv b/examples/v2/student-with-label-and-str-vid.csv deleted file mode 100644 index 0ab2daa5..00000000 --- a/examples/v2/student-with-label-and-str-vid.csv +++ /dev/null @@ -1,4 +0,0 @@ -+,x200,Monica,16,female -+,y201,Mike,18,male -+,z202,Jane,17,female --,y201,Mike,18,male diff --git a/examples/v2/student.csv b/examples/v2/student.csv deleted file mode 100644 index 497b3fb4..00000000 --- a/examples/v2/student.csv +++ /dev/null @@ -1,3 +0,0 @@ -x200,Monica,16,female -y201,Mike,18,male -z202,Jane,17,female diff --git a/go.mod b/go.mod index 7f95b053..213fb123 100644 --- a/go.mod +++ b/go.mod @@ -1,14 +1,56 @@ -module github.com/vesoft-inc/nebula-importer/v3 +module github.com/vesoft-inc/nebula-importer/v4 + +go 1.19 require ( + github.com/agiledragon/gomonkey/v2 v2.9.0 + github.com/aliyun/aliyun-oss-go-sdk v2.2.6+incompatible + github.com/aws/aws-sdk-go v1.44.178 github.com/cenkalti/backoff/v4 v4.1.3 - github.com/davecgh/go-spew v1.1.1 // indirect + github.com/colinmarc/hdfs/v2 v2.3.0 + github.com/dustin/go-humanize v1.0.0 + github.com/fclairamb/ftpserverlib v0.21.0 + github.com/golang/mock v1.6.0 + github.com/jlaffaye/ftp v0.1.0 + github.com/onsi/ginkgo/v2 v2.4.0 + github.com/onsi/gomega v1.24.0 + github.com/panjf2000/ants v1.2.1 + github.com/pkg/errors v0.9.1 + github.com/pkg/sftp v1.13.5 + github.com/spf13/afero v1.9.3 + github.com/spf13/cobra v1.6.1 + github.com/valyala/bytebufferpool v1.0.0 + github.com/vesoft-inc/nebula-go/v3 v3.3.1 + go.uber.org/zap v1.23.0 + golang.org/x/crypto v0.5.0 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + github.com/facebook/fbthrift v0.31.1-0.20211129061412-801ed7f9f295 // indirect + github.com/fclairamb/go-log v0.4.1 // indirect + github.com/go-logr/logr v1.2.3 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/hashicorp/errwrap v1.0.0 // indirect + github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/hashicorp/go-uuid v1.0.2 // indirect + github.com/inconshreveable/mousetrap v1.0.1 // indirect + github.com/jcmturner/aescts/v2 v2.0.0 // indirect + github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect + github.com/jcmturner/gofork v1.0.0 // indirect + github.com/jcmturner/goidentity/v6 v6.0.1 // indirect + github.com/jcmturner/gokrb5/v8 v8.4.2 // indirect + github.com/jcmturner/rpc/v2 v2.0.3 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/kr/fs v0.1.0 // indirect github.com/kr/text v0.2.0 // indirect - github.com/stretchr/testify v1.7.0 - 
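The go.mod hunk below renames the module from github.com/vesoft-inc/nebula-importer/v3 to github.com/vesoft-inc/nebula-importer/v4 and sets go 1.19, so every downstream import must adopt the new major-version suffix; running go mod tidy afterwards is what typically regenerates the long go.sum churn that follows. A trivial illustration (pkg/config is taken from this patch's diffstat; the snippet is a sketch, not consumer code):

package main

import "fmt"

// Before this patch a consumer imported the v3 module path:
//   import "github.com/vesoft-inc/nebula-importer/v3/pkg/config"
// After it, the same package lives under the v4 path:
//   import "github.com/vesoft-inc/nebula-importer/v4/pkg/config"
func main() {
	fmt.Println("bump import paths from /v3 to /v4")
}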
github.com/vesoft-inc/nebula-go/v3 v3.4.0 + github.com/spf13/pflag v1.0.5 // indirect + go.uber.org/atomic v1.7.0 // indirect + go.uber.org/multierr v1.6.0 // indirect + golang.org/x/net v0.5.0 // indirect + golang.org/x/sys v0.4.0 // indirect + golang.org/x/text v0.6.0 // indirect + golang.org/x/time v0.3.0 // indirect + google.golang.org/protobuf v1.28.1 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/yaml.v2 v2.4.0 - gopkg.in/yaml.v3 v3.0.1 // indirect ) - -go 1.13 diff --git a/go.sum b/go.sum index c0d69fad..5fa94804 100644 --- a/go.sum +++ b/go.sum @@ -1,31 +1,583 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod 
h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/agiledragon/gomonkey/v2 v2.9.0 h1:PDiKKybR596O6FHW+RVSG0Z7uGCBNbmbUXh3uCNQ7Hc= +github.com/agiledragon/gomonkey/v2 v2.9.0/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= +github.com/aliyun/aliyun-oss-go-sdk v2.2.6+incompatible h1:KXeJoM1wo9I/6xPTyt6qCxoSZnmASiAjlrr0dyTUKt8= +github.com/aliyun/aliyun-oss-go-sdk v2.2.6+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= +github.com/aws/aws-sdk-go v1.44.178 h1:4igreoWPEA7xVLnOeSXLhDXTsTSPKQONZcQ3llWAJw0= +github.com/aws/aws-sdk-go v1.44.178/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/colinmarc/hdfs/v2 v2.3.0 h1:tMxOjXn6+7iPUlxAyup9Ha2hnmLe3Sv5DM2qqbSQ2VY= +github.com/colinmarc/hdfs/v2 v2.3.0/go.mod h1:nsyY1uyQOomU34KVQk9Qb/lDJobN1MQ/9WS6IqcVZno= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod 
h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/facebook/fbthrift v0.31.1-0.20211129061412-801ed7f9f295 h1:ZA+qQ3d2In0RNzVpk+D/nq1sjDSv+s1Wy2zrAPQAmsg= github.com/facebook/fbthrift v0.31.1-0.20211129061412-801ed7f9f295/go.mod h1:2tncLx5rmw69e5kMBv/yJneERbzrr1yr5fdlnTbu8lU= +github.com/fclairamb/ftpserverlib v0.21.0 h1:QO4ex827FU6Y7FNi1cj4dmAs6bcmy+UtWcX5yzVzFAw= +github.com/fclairamb/ftpserverlib v0.21.0/go.mod h1:03sR5yGPYyUH/8hFKML02SVNLY7A//3qIy0q0ZJGhTw= +github.com/fclairamb/go-log v0.4.1 h1:rLtdSG9x2pK41AIAnE8WYpl05xBJfw1ZyYxZaXFcBsM= +github.com/fclairamb/go-log v0.4.1/go.mod h1:sw1KvnkZ4wKCYkvy4SL3qVZcJSWFP8Ure4pM3z+KNn4= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU= +github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA= +github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod 
h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod 
h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= +github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= +github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= +github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8= +github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o= +github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.2 h1:6ZIM6b/JJN0X8UM43ZOM6Z4SJzla+a/u7scXFJzodkA= +github.com/jcmturner/gokrb5/v8 v8.4.2/go.mod h1:sb+Xq/fTY5yktf/VxLsE3wlfPqQjp0aWNYyvBVK62bc= +github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jlaffaye/ftp v0.1.0 h1:DLGExl5nBoSFoNshAUHwXAezXwXBvFdx7/qwhucWNSE= +github.com/jlaffaye/ftp v0.1.0/go.mod h1:hhq4G4crv+nW2qXtNYcuzLeOudG92Ps37HEKeg2e3lE= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= 
+github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/onsi/ginkgo/v2 v2.4.0 h1:+Ig9nvqgS5OBSACXNk15PLdp0U9XPYROt9CFzVdFGIs= +github.com/onsi/ginkgo/v2 v2.4.0/go.mod h1:iHkDK1fKGcBoEHT5W7YBq4RFWaQulw+caOMkAt4OrFo= +github.com/onsi/gomega v1.24.0 h1:+0glovB9Jd6z3VR+ScSwQqXVTIfJcGA9UBM8yzQxhqg= +github.com/onsi/gomega v1.24.0/go.mod h1:Z/NWtiqwBrwUt4/2loMmHL63EDLnYHmVbuBpDr2vQAg= +github.com/panjf2000/ants v1.2.1 h1:IlhLREssFi+YFOITnHdH3FHhulY6WDS0OB9e7+3fMHk= +github.com/panjf2000/ants v1.2.1/go.mod h1:AaACblRPzq35m1g3enqYcxspbbiOJJYaxU2wMpm1cXY= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pkg/sftp v1.13.5 h1:a3RLUqkyjYRtBTZJZ1VRrKbN3zhuPLlUc3sphVz81go= +github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/secsy/goftp v0.0.0-20200609142545-aa2de14babf4 h1:PT+ElG/UUFMfqy5HrxJxNzj3QBOf7dZwupeVC+mG1Lo= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/afero v1.9.3 h1:41FoI0fD7OR7mGcKE/aOiLkGreyf8ifIOQmJANWogMk= +github.com/spf13/afero v1.9.3/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= +github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= +github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod 
h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/vesoft-inc/nebula-go/v3 v3.0.0-20220425030225-cdb52399b40a h1:/8l9RT6gU0cUS1Cgzqv3A9dKto19VQBjVk1BqAAqqvM= -github.com/vesoft-inc/nebula-go/v3 v3.0.0-20220425030225-cdb52399b40a/go.mod h1:+sXv05jYQBARdTbTcIEsWVXCnF/6ttOlDK35xQ6m54s= -github.com/vesoft-inc/nebula-go/v3 v3.4.0 h1:7q2DSW4QABwI2oGPSVuC+Ql7kGwj26G/YVPGD7gETys= -github.com/vesoft-inc/nebula-go/v3 v3.4.0/go.mod h1:+sXv05jYQBARdTbTcIEsWVXCnF/6ttOlDK35xQ6m54s= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/vesoft-inc/nebula-go/v3 v3.3.1 h1:5DxUxswEQvK9gkK6Y/X4fhX+bmIeHIJrn+b2q7tE3HM= +github.com/vesoft-inc/nebula-go/v3 v3.3.1/go.mod h1:+sXv05jYQBARdTbTcIEsWVXCnF/6ttOlDK35xQ6m54s= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= +go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.23.0 h1:OjGQ5KQDEUawVHxNwQgPpiypGHOxo2mNZsOqTak4fFY= +go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod 
h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod 
h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.4.0 h1:O7UWfv5+A2qiuulQk30kVinPoMtoIPeVaKLEgLpVkvg= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools 
v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools 
v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod 
h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod 
h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/pkg/base/stats.go b/pkg/base/stats.go deleted file mode 100644 index 6d00d64e..00000000 --- a/pkg/base/stats.go +++ /dev/null @@ -1,55 +0,0 @@ -package base - -type StatType int - -const ( - SUCCESS StatType = 0 - FAILURE StatType = 1 - FILEDONE StatType = 2 - OUTPUT StatType = 3 -) - -const STAT_FILEDONE string = "FILEDONE" - -type Stats struct { - Type StatType - Latency int64 - ReqTime int64 - BatchSize int - ImportedBytes int64 - Filename string -} - -func NewSuccessStats(latency int64, reqTime int64, batchSize int, importedBytes int64) Stats { - return Stats{ - Type: SUCCESS, - Latency: latency, - ReqTime: reqTime, - BatchSize: batchSize, - ImportedBytes: importedBytes, - } -} - -func NewFailureStats(batchSize int, importedBytes int64) Stats { - return Stats{ - Type: FAILURE, - BatchSize: batchSize, - ImportedBytes: importedBytes, - } -} - -func NewFileDoneStats(filename string) Stats { - // When goto this step, we have finished configure file validation - // and it's safe to ignore following error - fpath, _ := FormatFilePath(filename) - return Stats{ - Type: FILEDONE, - 
Filename: fpath, - } -} - -func NewOutputStats() Stats { - return Stats{ - Type: OUTPUT, - } -} diff --git a/pkg/base/stoppable.go b/pkg/base/stoppable.go deleted file mode 100644 index bff558ce..00000000 --- a/pkg/base/stoppable.go +++ /dev/null @@ -1,5 +0,0 @@ -package base - -type Stoppable interface { - Stop() -} diff --git a/pkg/base/tools.go b/pkg/base/tools.go deleted file mode 100644 index 76af6a8a..00000000 --- a/pkg/base/tools.go +++ /dev/null @@ -1,66 +0,0 @@ -package base - -import ( - "fmt" - "net/url" - "os" - "path" - "strings" -) - -func MustCreateFile(filePath string) *os.File { - if err := os.MkdirAll(path.Dir(filePath), 0775); err != nil && !os.IsExist(err) { - panic(err) - } - file, err := os.Create(filePath) - if err != nil { - panic(err) - } - return file -} - -func FileExists(filename string) bool { - info, err := os.Stat(filename) - if os.IsNotExist(err) { - return false - } - return !info.IsDir() -} - -func IsValidType(t string) bool { - switch strings.ToLower(t) { - case "string", "int", "float", "double", "bool", "date", "time", "datetime", "timestamp", "geography", "geography(point)", "geography(linestring)", "geography(polygon)": - return true - default: - return false - } -} - -func HasHttpPrefix(path string) bool { - return strings.HasPrefix(path, "https://") || strings.HasPrefix(path, "http://") -} - -func ExtractFilename(uri string) (local bool, filename string, err error) { - if !HasHttpPrefix(uri) { - local, filename, err = true, uri, nil - return - } - - local = false - base := path.Base(uri) - if index := strings.Index(base, "?"); index != -1 { - filename, err = url.QueryUnescape(base[:index]) - } else { - filename, err = url.QueryUnescape(base) - } - return -} - -func FormatFilePath(filepath string) (path string, err error) { - local, path, err := ExtractFilename(filepath) - if local || err != nil { - return - } - path = fmt.Sprintf("http(s)://**/%s", path) - return -} diff --git a/pkg/base/tools_test.go b/pkg/base/tools_test.go deleted file mode 100644 index cffb9505..00000000 --- a/pkg/base/tools_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package base - -import ( - "os" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFileExists(t *testing.T) { - fileName := "test.csv" - var isExist bool - isExist = FileExists(fileName) - assert.False(t, isExist) - file := MustCreateFile(fileName) - isExist = FileExists(fileName) - assert.True(t, isExist) - file.Close() - os.Remove(fileName) -} - -func TestIsValidType(t *testing.T) { - assert.True(t, IsValidType("string")) - assert.True(t, IsValidType("String")) - assert.True(t, IsValidType("STRING")) - assert.True(t, IsValidType("sTring")) - assert.True(t, IsValidType("int")) - assert.True(t, IsValidType("float")) - assert.True(t, IsValidType("date")) - assert.False(t, IsValidType("byte")) - assert.True(t, IsValidType("datetime")) - assert.True(t, IsValidType("bool")) - assert.True(t, IsValidType("timestamp")) - assert.True(t, IsValidType("double")) - assert.True(t, IsValidType("geography")) - assert.True(t, IsValidType("geography(point)")) - assert.True(t, IsValidType("geography(linestring)")) - assert.True(t, IsValidType("geography(polygon)")) -} diff --git a/pkg/base/types.go b/pkg/base/types.go deleted file mode 100644 index 864df476..00000000 --- a/pkg/base/types.go +++ /dev/null @@ -1,96 +0,0 @@ -package base - -type Stmt struct { - Stmt string - Data [][]interface{} -} - -type Record []string - -type OpType int - -const ( - DONE OpType = 0 - INSERT OpType = 1 - DELETE OpType = 2 - HEADER 
OpType = 100
-)
-
-func (op OpType) String() string {
-	switch op {
-	case 0:
-		return "DONE"
-	case 1:
-		return "INSERT"
-	case 2:
-		return "DELETE"
-	case 100:
-		return "HEADER"
-	default:
-		return "UNKNOWN"
-	}
-}
-
-type Data struct {
-	Type   OpType
-	Record Record
-	Bytes  int
-}
-
-func InsertData(record Record, bytes int) Data {
-	return Data{
-		Type:   INSERT,
-		Record: record,
-		Bytes:  bytes,
-	}
-}
-
-func DeleteData(record Record, bytes int) Data {
-	return Data{
-		Type:   DELETE,
-		Record: record,
-		Bytes:  bytes,
-	}
-}
-
-func HeaderData(record Record, bytes int) Data {
-	return Data{
-		Type:   HEADER,
-		Record: record,
-		Bytes:  bytes,
-	}
-}
-
-var done = Data{
-	Type:   DONE,
-	Record: nil,
-}
-
-func FinishData() Data {
-	return done
-}
-
-type ErrData struct {
-	Error error
-	Data  []Data
-}
-
-type ResponseData struct {
-	Error error
-	Stats Stats
-}
-
-type ClientRequest struct {
-	Stmt  string
-	ErrCh chan<- ErrData
-	Data  []Data
-}
-
-const (
-	LABEL_LABEL   = ":LABEL"
-	LABEL_VID     = ":VID"
-	LABEL_SRC_VID = ":SRC_VID"
-	LABEL_DST_VID = ":DST_VID"
-	LABEL_RANK    = ":RANK"
-	LABEL_IGNORE  = ":IGNORE"
-)
diff --git a/pkg/bytebufferpool/bytebufferpool.go b/pkg/bytebufferpool/bytebufferpool.go
new file mode 100644
index 00000000..ae413766
--- /dev/null
+++ b/pkg/bytebufferpool/bytebufferpool.go
@@ -0,0 +1,69 @@
+package bytebufferpool
+
+import (
+	bbp "github.com/valyala/bytebufferpool"
+)
+
+type (
+	ByteBuffer bbp.ByteBuffer
+)
+
+func Get() *ByteBuffer {
+	return (*ByteBuffer)(bbp.Get())
+}
+
+func Put(b *ByteBuffer) {
+	bbp.Put((*bbp.ByteBuffer)(b))
+}
+
+func (b *ByteBuffer) Len() int {
+	return (*bbp.ByteBuffer)(b).Len()
+}
+
+func (b *ByteBuffer) Bytes() []byte {
+	return (*bbp.ByteBuffer)(b).Bytes()
+}
+func (b *ByteBuffer) Write(p []byte) (int, error) {
+	return (*bbp.ByteBuffer)(b).Write(p)
+}
+
+func (b *ByteBuffer) WriteString(s string) (int, error) {
+	return (*bbp.ByteBuffer)(b).WriteString(s)
+}
+
+func (b *ByteBuffer) Set(p []byte) {
+	(*bbp.ByteBuffer)(b).Set(p)
+}
+
+func (b *ByteBuffer) SetString(s string) {
+	(*bbp.ByteBuffer)(b).SetString(s)
+}
+
+func (b *ByteBuffer) String() string {
+	return (*bbp.ByteBuffer)(b).String()
+}
+
+func (b *ByteBuffer) Reset() {
+	(*bbp.ByteBuffer)(b).Reset()
+}
+
+func (b *ByteBuffer) WriteStringSlice(elems []string, sep string) (n int, err error) {
+	switch len(elems) {
+	case 0:
+		return 0, nil
+	case 1:
+		return b.WriteString(elems[0])
+	}
+	return b.writeStringSliceSlow(elems, sep)
+}
+
+func (b *ByteBuffer) writeStringSliceSlow(elems []string, sep string) (int, error) {
+	n, _ := b.WriteString(elems[0])
+	for _, s := range elems[1:] {
+		n1, _ := b.WriteString(sep)
+		n += n1
+		n1, _ = b.WriteString(s)
+		n += n1
+	}
+	return n, nil
+}
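For orientation, a minimal usage sketch of the new bytebufferpool wrapper introduced above (editor-added illustration, not part of the patch; it assumes the github.com/vesoft-inc/nebula-importer/v4 module path used throughout this change):

package main

import (
	"fmt"

	"github.com/vesoft-inc/nebula-importer/v4/pkg/bytebufferpool"
)

func main() {
	// Get borrows a buffer from the pool; Put returns it for reuse,
	// avoiding a fresh allocation per generated statement.
	buff := bytebufferpool.Get()
	defer bytebufferpool.Put(buff)

	_, _ = buff.WriteString("INSERT VERTEX person(name) VALUES ")
	// WriteStringSlice joins the elements with the separator,
	// writing directly into the pooled buffer.
	_, _ = buff.WriteStringSlice([]string{`1:("a")`, `2:("b")`}, ", ")
	fmt.Println(buff.String())
}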
"github.com/onsi/gomega" +) + +var _ = Describe("ByteBuffer", func() { + It("", func() { + buff := Get() + defer Put(buff) + + Expect(buff).NotTo(BeNil()) + Expect(buff.Len()).To(Equal(0)) + Expect(buff.Bytes()).To(Equal([]byte(nil))) + Expect(buff.String()).To(Equal("")) + + buff.Write([]byte("a")) //nolint:gocritic + Expect(buff.Len()).To(Equal(1)) + Expect(buff.Bytes()).To(Equal([]byte("a"))) + Expect(buff.String()).To(Equal("a")) + + buff.WriteString("b") + Expect(buff.Len()).To(Equal(2)) + Expect(buff.Bytes()).To(Equal([]byte("ab"))) + Expect(buff.String()).To(Equal("ab")) + + buff.Set([]byte("c")) + Expect(buff.Len()).To(Equal(1)) + Expect(buff.Bytes()).To(Equal([]byte("c"))) + Expect(buff.String()).To(Equal("c")) + + buff.SetString("d") + Expect(buff.Len()).To(Equal(1)) + Expect(buff.Bytes()).To(Equal([]byte("d"))) + Expect(buff.String()).To(Equal("d")) + + buff.Reset() + Expect(buff.Len()).To(Equal(0)) + Expect(buff.Bytes()).To(Equal([]byte(nil))) + Expect(buff.String()).To(Equal("")) + + buff.WriteStringSlice(nil, ",") + Expect(buff.Len()).To(Equal(0)) + Expect(buff.Bytes()).To(Equal([]byte(nil))) + Expect(buff.String()).To(Equal("")) + + buff.WriteStringSlice([]string{}, ",") + Expect(buff.Len()).To(Equal(0)) + Expect(buff.Bytes()).To(Equal([]byte(nil))) + Expect(buff.String()).To(Equal("")) + + buff.WriteStringSlice([]string{"a"}, ",") + Expect(buff.Len()).To(Equal(1)) + Expect(buff.Bytes()).To(Equal([]byte("a"))) + Expect(buff.String()).To(Equal("a")) + + buff.WriteStringSlice([]string{"b", "c", "d"}, ",") + Expect(buff.Len()).To(Equal(6)) + Expect(buff.Bytes()).To(Equal([]byte("ab,c,d"))) + Expect(buff.String()).To(Equal("ab,c,d")) + }) +}) diff --git a/pkg/client/client.go b/pkg/client/client.go new file mode 100644 index 00000000..e3a4cf59 --- /dev/null +++ b/pkg/client/client.go @@ -0,0 +1,138 @@ +//go:generate mockgen -source=client.go -destination client_mock.go -package client Client +package client + +import ( + "strconv" + "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + "github.com/cenkalti/backoff/v4" +) + +type ( + Client interface { + Open() error + Execute(statement string) (Response, error) + Close() error + } + + HostAddress struct { + Host string + Port int + } + + defaultClient struct { + *options + session Session + } +) + +func NewClient(opts ...Option) Client { + return newClientWithOptions(newOptions(opts...)) +} + +func newClientWithOptions(o *options) Client { + return &defaultClient{ + options: o, + } +} + +func (c *defaultClient) Open() error { + if len(c.addresses) == 0 { + return errors.ErrNoAddresses + } + hostPort := strings.Split(c.addresses[0], ":") + if len(hostPort) != 2 { + return errors.ErrInvalidAddress + } + if hostPort[0] == "" { + return errors.ErrInvalidAddress + } + port, err := strconv.Atoi(hostPort[1]) + if err != nil { + return errors.ErrInvalidAddress + } + hostAddress := HostAddress{Host: hostPort[0], Port: port} + + session := c.fnNewSession(hostAddress) + if err = session.Open(); err != nil { + return err + } + + c.session = session + + if c.clientInitFunc != nil { + if err = c.clientInitFunc(c); err != nil { + c.session = nil + _ = session.Close() + return err + } + } + + return nil +} + +func (c *defaultClient) Execute(statement string) (Response, error) { + exp := backoff.NewExponentialBackOff() + exp.InitialInterval = c.retryInitialInterval + exp.MaxInterval = DefaultRetryMaxInterval + exp.MaxElapsedTime = DefaultRetryMaxElapsedTime + exp.Multiplier = DefaultRetryMultiplier + exp.RandomizationFactor = 
diff --git a/pkg/client/client.go b/pkg/client/client.go
new file mode 100644
index 00000000..e3a4cf59
--- /dev/null
+++ b/pkg/client/client.go
@@ -0,0 +1,138 @@
+//go:generate mockgen -source=client.go -destination client_mock.go -package client Client
+package client
+
+import (
+	"strconv"
+	"strings"
+
+	"github.com/vesoft-inc/nebula-importer/v4/pkg/errors"
+
+	"github.com/cenkalti/backoff/v4"
+)
+
+type (
+	Client interface {
+		Open() error
+		Execute(statement string) (Response, error)
+		Close() error
+	}
+
+	HostAddress struct {
+		Host string
+		Port int
+	}
+
+	defaultClient struct {
+		*options
+		session Session
+	}
+)
+
+func NewClient(opts ...Option) Client {
+	return newClientWithOptions(newOptions(opts...))
+}
+
+func newClientWithOptions(o *options) Client {
+	return &defaultClient{
+		options: o,
+	}
+}
+
+func (c *defaultClient) Open() error {
+	if len(c.addresses) == 0 {
+		return errors.ErrNoAddresses
+	}
+	hostPort := strings.Split(c.addresses[0], ":")
+	if len(hostPort) != 2 {
+		return errors.ErrInvalidAddress
+	}
+	if hostPort[0] == "" {
+		return errors.ErrInvalidAddress
+	}
+	port, err := strconv.Atoi(hostPort[1])
+	if err != nil {
+		return errors.ErrInvalidAddress
+	}
+	hostAddress := HostAddress{Host: hostPort[0], Port: port}
+
+	session := c.fnNewSession(hostAddress)
+	if err = session.Open(); err != nil {
+		return err
+	}
+
+	c.session = session
+
+	if c.clientInitFunc != nil {
+		if err = c.clientInitFunc(c); err != nil {
+			c.session = nil
+			_ = session.Close()
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (c *defaultClient) Execute(statement string) (Response, error) {
+	exp := backoff.NewExponentialBackOff()
+	exp.InitialInterval = c.retryInitialInterval
+	exp.MaxInterval = DefaultRetryMaxInterval
+	exp.MaxElapsedTime = DefaultRetryMaxElapsedTime
+	exp.Multiplier = DefaultRetryMultiplier
+	exp.RandomizationFactor = DefaultRetryRandomizationFactor
+
+	var (
+		err   error
+		resp  Response
+		retry = c.retry
+	)
+
+	// There are three cases of retry
+	// * Case 1: retry no more
+	// * Case 2. retry as much as possible
+	// * Case 3: retry with limit times
+	_ = backoff.Retry(func() error {
+		resp, err = c.session.Execute(statement)
+		if err == nil && resp.IsSucceed() {
+			return nil
+		}
+		retryErr := err
+		if resp != nil {
+			retryErr = resp.GetError()
+
+			// Case 1: retry no more
+			if resp.IsPermanentError() {
+				// stop the retry
+				return backoff.Permanent(retryErr)
+			}
+
+			// Case 2. retry as much as possible
+			if resp.IsRetryMoreError() {
+				retry = c.retry
+				return retryErr
+			}
+		}
+
+		// Case 3: retry with limit times
+		if retry <= 0 {
+			// stop the retry
+			return backoff.Permanent(retryErr)
+		}
+		retry--
+		return retryErr
+	}, exp)
+	if err != nil {
+		c.logger.WithError(err).Error("execute statement failed")
+	}
+	return resp, err
+}
+
+func (c *defaultClient) Close() error {
+	if c.session != nil {
+		if err := c.session.Close(); err != nil {
+			return err
+		}
+		c.session = nil
+	}
+	return nil
+}
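For orientation, a minimal sketch of driving the new Client added above (editor-added illustration, not part of the patch). It assumes a reachable graphd at 127.0.0.1:9669 with the default root/nebula credentials; every option used here is defined in pkg/client/option.go later in this diff:

package main

import (
	"fmt"
	"log"

	"github.com/vesoft-inc/nebula-importer/v4/pkg/client"
)

func main() {
	// WithV3 selects the nebula-go v3 session factory; it is also the
	// default when no session func is set, shown here for clarity.
	c := client.NewClient(
		client.WithV3(),
		client.WithAddress("127.0.0.1:9669"),
		client.WithUserPassword("root", "nebula"),
		client.WithRetry(3),
	)
	if err := c.Open(); err != nil {
		log.Fatal(err)
	}
	defer c.Close()

	// Execute retries internally with exponential backoff according to
	// the three retry cases documented in defaultClient.Execute above.
	resp, err := c.Execute("SHOW HOSTS;")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("succeed:", resp.IsSucceed())
}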
diff --git a/pkg/client/client_mock.go b/pkg/client/client_mock.go
new file mode 100644
index 00000000..898a3579
--- /dev/null
+++ b/pkg/client/client_mock.go
@@ -0,0 +1,77 @@
+// Code generated by MockGen. DO NOT EDIT.
+// Source: client.go
+
+// Package client is a generated GoMock package.
+package client
+
+import (
+	reflect "reflect"
+
+	gomock "github.com/golang/mock/gomock"
+)
+
+// MockClient is a mock of Client interface.
+type MockClient struct {
+	ctrl     *gomock.Controller
+	recorder *MockClientMockRecorder
+}
+
+// MockClientMockRecorder is the mock recorder for MockClient.
+type MockClientMockRecorder struct {
+	mock *MockClient
+}
+
+// NewMockClient creates a new mock instance.
+func NewMockClient(ctrl *gomock.Controller) *MockClient {
+	mock := &MockClient{ctrl: ctrl}
+	mock.recorder = &MockClientMockRecorder{mock}
+	return mock
+}
+
+// EXPECT returns an object that allows the caller to indicate expected use.
+func (m *MockClient) EXPECT() *MockClientMockRecorder {
+	return m.recorder
+}
+
+// Close mocks base method.
+func (m *MockClient) Close() error {
+	m.ctrl.T.Helper()
+	ret := m.ctrl.Call(m, "Close")
+	ret0, _ := ret[0].(error)
+	return ret0
+}
+
+// Close indicates an expected call of Close.
+func (mr *MockClientMockRecorder) Close() *gomock.Call {
+	mr.mock.ctrl.T.Helper()
+	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockClient)(nil).Close))
+}
+
+// Execute mocks base method.
+func (m *MockClient) Execute(statement string) (Response, error) {
+	m.ctrl.T.Helper()
+	ret := m.ctrl.Call(m, "Execute", statement)
+	ret0, _ := ret[0].(Response)
+	ret1, _ := ret[1].(error)
+	return ret0, ret1
+}
+
+// Execute indicates an expected call of Execute.
+func (mr *MockClientMockRecorder) Execute(statement interface{}) *gomock.Call {
+	mr.mock.ctrl.T.Helper()
+	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Execute", reflect.TypeOf((*MockClient)(nil).Execute), statement)
+}
+
+// Open mocks base method.
+func (m *MockClient) Open() error {
+	m.ctrl.T.Helper()
+	ret := m.ctrl.Call(m, "Open")
+	ret0, _ := ret[0].(error)
+	return ret0
+}
+
+// Open indicates an expected call of Open.
+func (mr *MockClientMockRecorder) Open() *gomock.Call {
+	mr.mock.ctrl.T.Helper()
+	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockClient)(nil).Open))
+}
diff --git a/pkg/client/client_suite_test.go b/pkg/client/client_suite_test.go
new file mode 100644
index 00000000..3a2ee85f
--- /dev/null
+++ b/pkg/client/client_suite_test.go
@@ -0,0 +1,13 @@
+package client
+
+import (
+	"testing"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+func TestClient(t *testing.T) {
+	RegisterFailHandler(Fail)
+	RunSpecs(t, "Pkg client Suite")
+}
diff --git a/pkg/client/client_test.go b/pkg/client/client_test.go
new file mode 100644
index 00000000..72548f87
--- /dev/null
+++ b/pkg/client/client_test.go
@@ -0,0 +1,241 @@
+package client
+
+import (
+	stderrors "errors"
+	"sync/atomic"
+	"time"
+
+	"github.com/vesoft-inc/nebula-importer/v4/pkg/errors"
+
+	"github.com/golang/mock/gomock"
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+var _ = Describe("Client", func() {
+	It("NewClient", func() {
+		c := NewClient(WithAddress("127.0.0.1:9669"))
+		c1, ok := c.(*defaultClient)
+		Expect(ok).To(BeTrue())
+		Expect(c1).NotTo(BeNil())
+		Expect(c1.addresses).To(Equal([]string{"127.0.0.1:9669"}))
+	})
+
+	Describe(".Open", func() {
+		var (
+			ctrl        *gomock.Controller
+			mockSession *MockSession
+		)
+
+		BeforeEach(func() {
+			ctrl = gomock.NewController(GinkgoT())
+			mockSession = NewMockSession(ctrl)
+		})
+
+		AfterEach(func() {
+			ctrl.Finish()
+		})
+
+		It("no addresses", func() {
+			c := NewClient()
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(errors.ErrNoAddresses))
+		})
+
+		It("empty address", func() {
+			c := NewClient(WithAddress(""))
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(errors.ErrInvalidAddress))
+		})
+
+		It("host empty", func() {
+			c := NewClient(WithAddress(":9669"))
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(errors.ErrInvalidAddress))
+		})
+
+		It("port is not a number", func() {
+			c := NewClient(WithAddress("127.0.0.1:x"))
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(errors.ErrInvalidAddress))
+		})
+
+		It("real nebula session", func() {
+			c := NewClient(WithAddress("127.0.0.1:0"))
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+		})
+
+		It("open session failed", func() {
+			c := NewClient(
+				WithAddress("127.0.0.1:9669"),
+				WithNewSessionFunc(func(_ HostAddress) Session {
+					return mockSession
+				}),
+			)
+			mockSession.EXPECT().Open().Return(stderrors.New("test open failed"))
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(stderrors.New("test open failed")))
+		})
+
+		It("clientInitFunc failed", func() {
+			c := NewClient(
+				WithAddress("127.0.0.1:9669"),
+				WithNewSessionFunc(func(_ HostAddress) Session {
+					return mockSession
+				}),
+				WithClientInitFunc(func(client Client) error {
+					return stderrors.New("test open failed")
+				}),
+			)
+			mockSession.EXPECT().Open().Return(nil)
+			mockSession.EXPECT().Close().Return(nil)
+			err := c.Open()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(stderrors.New("test open failed")))
+		})
+
+		It("successfully", func() {
+			c := NewClient(
+				WithAddress("127.0.0.1:9669"),
+				WithNewSessionFunc(func(_ HostAddress) Session {
+					return mockSession
+				}),
+				WithClientInitFunc(func(client Client) error {
+					return nil
+				}),
+			)
+			mockSession.EXPECT().Open().Return(nil)
+			err := c.Open()
+			Expect(err).NotTo(HaveOccurred())
+		})
+	})
+
+	Describe(".Execute", func() {
+		var (
+			c            Client
+			ctrl         *gomock.Controller
+			mockSession  *MockSession
+			mockResponse *MockResponse
+		)
+
+		BeforeEach(func() {
+			ctrl = gomock.NewController(GinkgoT())
+			mockSession = NewMockSession(ctrl)
+			mockResponse = NewMockResponse(ctrl)
+			c = NewClient(
+				WithAddress("127.0.0.1:9669"),
+				WithRetryInitialInterval(time.Microsecond),
+				WithNewSessionFunc(func(_ HostAddress) Session {
+					return mockSession
+				}),
+			)
+
+			mockSession.EXPECT().Open().Return(nil)
+			err := c.Open()
+			Expect(err).NotTo(HaveOccurred())
+		})
+
+		AfterEach(func() {
+			ctrl.Finish()
+		})
+
+		It("retry case1", func() {
+			// * Case 1: retry no more
+			mockSession.EXPECT().Execute("test Execute statement").Times(1).Return(mockResponse, nil)
+			mockResponse.EXPECT().IsSucceed().Times(1).Return(false)
+			mockResponse.EXPECT().GetError().Times(1).Return(stderrors.New("test error"))
+			mockResponse.EXPECT().IsPermanentError().Times(2).Return(true)
+
+			resp, err := c.Execute("test Execute statement")
+			Expect(err).NotTo(HaveOccurred())
+			Expect(resp).NotTo(BeNil())
+			Expect(resp.IsPermanentError()).To(BeTrue())
+		})
+
+		It("retry case2", func() {
+			retryTimes := DefaultRetry + 10
+			var currExecuteTimes int64
+			fnIsSucceed := func() bool {
+				curr := atomic.AddInt64(&currExecuteTimes, 1)
+				return curr > int64(retryTimes)
+			}
+
+			// * Case 2. retry as much as possible
+			mockSession.EXPECT().Execute("test Execute statement").Times(retryTimes+1).Return(mockResponse, nil)
+			mockResponse.EXPECT().IsSucceed().Times(retryTimes + 2).DoAndReturn(fnIsSucceed)
+			mockResponse.EXPECT().GetError().Times(retryTimes).Return(stderrors.New("test error"))
+			mockResponse.EXPECT().IsPermanentError().Times(retryTimes).Return(false)
+			mockResponse.EXPECT().IsRetryMoreError().Times(retryTimes).Return(true)
+
+			resp, err := c.Execute("test Execute statement")
+			Expect(err).NotTo(HaveOccurred())
+			Expect(resp).NotTo(BeNil())
+			Expect(resp.IsSucceed()).To(BeTrue())
+		})
+
+		It("retry case3", func() {
+			// * Case 3: retry with limit times
+			mockSession.EXPECT().Execute("test Execute statement").Times(DefaultRetry+1).Return(nil, stderrors.New("execute failed"))
+
+			resp, err := c.Execute("test Execute statement")
+			Expect(err).To(HaveOccurred())
+			Expect(resp).To(BeNil())
+		})
+
+		It("successfully", func() {
+			mockSession.EXPECT().Execute("test Execute statement").Times(1).Return(mockResponse, nil)
+			mockResponse.EXPECT().IsSucceed().Times(1).Return(true)
+
+			resp, err := c.Execute("test Execute statement")
+			Expect(err).NotTo(HaveOccurred())
+			Expect(resp).NotTo(BeNil())
+		})
+	})
+
+	Describe(".Close", func() {
+		var (
+			c           Client
+			ctrl        *gomock.Controller
+			mockSession *MockSession
+		)
+
+		BeforeEach(func() {
+			ctrl = gomock.NewController(GinkgoT())
+			mockSession = NewMockSession(ctrl)
+			c = NewClient(
+				WithAddress("127.0.0.1:9669"),
+				WithRetryInitialInterval(time.Microsecond),
+				WithNewSessionFunc(func(_ HostAddress) Session {
+					return mockSession
+				}),
+			)
+
+			mockSession.EXPECT().Open().Return(nil)
+			err := c.Open()
+			Expect(err).NotTo(HaveOccurred())
+		})
+
+		AfterEach(func() {
+			ctrl.Finish()
+		})
+
+		It("close session failed", func() {
+			mockSession.EXPECT().Close().Return(stderrors.New("close session failed"))
+			err := c.Close()
+			Expect(err).To(HaveOccurred())
+			Expect(err).To(Equal(stderrors.New("close session failed")))
+		})
+
+		It("successfully", func() {
+			mockSession.EXPECT().Close().Return(nil)
+			err := c.Close()
+			Expect(err).NotTo(HaveOccurred())
+		})
+	})
+})
diff --git a/pkg/client/clientmgr.go b/pkg/client/clientmgr.go
deleted file mode
100644 index 2e6f15a5..00000000 --- a/pkg/client/clientmgr.go +++ /dev/null @@ -1,48 +0,0 @@ -package client - -import ( - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type NebulaClientMgr struct { - config *config.NebulaClientSettings - pool *ClientPool - runnerLogger *logger.RunnerLogger -} - -func NewNebulaClientMgr(settings *config.NebulaClientSettings, statsCh chan<- base.Stats, - runnerLogger *logger.RunnerLogger) (*NebulaClientMgr, error) { - mgr := NebulaClientMgr{ - config: settings, - runnerLogger: runnerLogger, - } - - if pool, err := NewClientPool(settings, statsCh, runnerLogger); err != nil { - return nil, err - } else { - if err := pool.Init(); err != nil { - return nil, err - } - mgr.pool = pool - } - - logger.Log.Infof("Create %d Nebula Graph clients", mgr.GetNumConnections()) - - return &mgr, nil -} - -func (m *NebulaClientMgr) Close() { - m.runnerLogger.Infof("Client manager closing") - m.pool.Close() - m.runnerLogger.Infof("Client manager closed") -} - -func (m *NebulaClientMgr) GetRequestChans() []chan base.ClientRequest { - return m.pool.requestChs -} - -func (m *NebulaClientMgr) GetNumConnections() int { - return len(m.pool.requestChs) -} diff --git a/pkg/client/clientpool.go b/pkg/client/clientpool.go deleted file mode 100644 index 41706f48..00000000 --- a/pkg/client/clientpool.go +++ /dev/null @@ -1,262 +0,0 @@ -package client - -import ( - "fmt" - "strconv" - "strings" - "time" - - "github.com/cenkalti/backoff/v4" - nebula "github.com/vesoft-inc/nebula-go/v3" - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -const ( - DefaultRetryInitialInterval = time.Second - DefaultRetryRandomizationFactor = 0.1 - DefaultRetryMultiplier = 1.5 - DefaultRetryMaxInterval = 2 * time.Minute - DefaultRetryMaxElapsedTime = time.Hour -) - -type ClientPool struct { - retry int - concurrency int - space string - postStart *config.NebulaPostStart - preStop *config.NebulaPreStop - statsCh chan<- base.Stats - pool *nebula.ConnectionPool - Sessions []*nebula.Session - requestChs []chan base.ClientRequest - runnerLogger *logger.RunnerLogger -} - -func NewClientPool(settings *config.NebulaClientSettings, statsCh chan<- base.Stats, runnerLogger *logger.RunnerLogger) (*ClientPool, error) { - addrs := strings.Split(*settings.Connection.Address, ",") - var hosts []nebula.HostAddress - for _, addr := range addrs { - hostPort := strings.Split(addr, ":") - if len(hostPort) != 2 { - return nil, fmt.Errorf("Invalid address: %s", addr) - } - port, err := strconv.Atoi(hostPort[1]) - if err != nil { - return nil, err - } - hostAddr := nebula.HostAddress{Host: hostPort[0], Port: port} - hosts = append(hosts, hostAddr) - } - conf := nebula.PoolConfig{ - TimeOut: 0, - IdleTime: 0, - MaxConnPoolSize: len(addrs) * *settings.Concurrency, - MinConnPoolSize: 1, - } - connPool, err := nebula.NewConnectionPool(hosts, conf, logger.NewNebulaLogger(runnerLogger)) - if err != nil { - return nil, err - } - pool := ClientPool{ - space: *settings.Space, - postStart: settings.PostStart, - preStop: settings.PreStop, - statsCh: statsCh, - pool: connPool, - runnerLogger: runnerLogger, - } - pool.retry = *settings.Retry - pool.concurrency = (*settings.Concurrency) * len(addrs) - pool.Sessions = make([]*nebula.Session, pool.concurrency) - pool.requestChs = make([]chan 
base.ClientRequest, pool.concurrency) - - j := 0 - for k := 0; k < len(addrs); k++ { - for i := 0; i < *settings.Concurrency; i++ { - if pool.Sessions[j], err = pool.pool.GetSession(*settings.Connection.User, *settings.Connection.Password); err != nil { - return nil, err - } - pool.requestChs[j] = make(chan base.ClientRequest, *settings.ChannelBufferSize) - j++ - } - } - - return &pool, nil -} - -func (p *ClientPool) getActiveConnIdx() int { - for i := range p.Sessions { - if p.Sessions[i] != nil { - return i - } - } - return -1 -} - -func (p *ClientPool) exec(i int, stmt string) error { - if len(stmt) == 0 { - return nil - } - resp, err := p.Sessions[i].Execute(stmt) - if err != nil { - return fmt.Errorf("Client(%d) fails to execute commands (%s), error: %s", i, stmt, err.Error()) - } - - if !resp.IsSucceed() { - return fmt.Errorf("Client(%d) fails to execute commands (%s), response error code: %v, message: %s", - i, stmt, resp.GetErrorCode(), resp.GetErrorMsg()) - } - - return nil -} - -func (p *ClientPool) Close() { - if p.preStop != nil && p.preStop.Commands != nil { - if i := p.getActiveConnIdx(); i != -1 { - if err := p.exec(i, *p.preStop.Commands); err != nil { - logger.Log.Errorf("%s", err.Error()) - } - } - } - - for i := 0; i < p.concurrency; i++ { - if p.Sessions[i] != nil { - p.Sessions[i].Release() - } - if p.requestChs[i] != nil { - close(p.requestChs[i]) - } - } - p.pool.Close() -} - -func (p *ClientPool) Init() error { - i := p.getActiveConnIdx() - if i == -1 { - return fmt.Errorf("no available session.") - } - if p.postStart != nil && p.postStart.Commands != nil { - if err := p.exec(i, *p.postStart.Commands); err != nil { - return err - } - } - - if p.postStart != nil { - afterPeriod, _ := time.ParseDuration(*p.postStart.AfterPeriod) - time.Sleep(afterPeriod) - } - - // pre-check for use space statement - if err := p.exec(i, fmt.Sprintf("USE `%s`;", p.space)); err != nil { - return err - } - - for i := 0; i < p.concurrency; i++ { - go func(i int) { - p.startWorker(i) - }(i) - } - return nil -} - -func (p *ClientPool) startWorker(i int) { - stmt := fmt.Sprintf("USE `%s`;", p.space) - if err := p.exec(i, stmt); err != nil { - logger.Log.Error(err.Error()) - return - } - for { - data, ok := <-p.requestChs[i] - if !ok { - break - } - - if data.Stmt == base.STAT_FILEDONE { - data.ErrCh <- base.ErrData{Error: nil} - continue - } - - now := time.Now() - - exp := backoff.NewExponentialBackOff() - exp.InitialInterval = DefaultRetryInitialInterval - exp.RandomizationFactor = DefaultRetryRandomizationFactor - exp.Multiplier = DefaultRetryMultiplier - exp.MaxInterval = DefaultRetryMaxInterval - exp.MaxElapsedTime = DefaultRetryMaxElapsedTime - - var ( - err error - resp *nebula.ResultSet - retry = p.retry - ) - - // There are three cases of retry - // * Case 1: retry no more - // * Case 2. retry as much as possible - // * Case 3: retry with limit times - _ = backoff.Retry(func() error { - resp, err = p.Sessions[i].Execute(data.Stmt) - if err == nil && resp.IsSucceed() { - return nil - } - retryErr := err - if resp != nil { - errorCode, errorMsg := resp.GetErrorCode(), resp.GetErrorMsg() - retryErr = fmt.Errorf("%d:%s", errorCode, errorMsg) - - // Case 1: retry no more - var isPermanentError = true - switch errorCode { - case nebula.ErrorCode_E_SYNTAX_ERROR: - case nebula.ErrorCode_E_SEMANTIC_ERROR: - default: - isPermanentError = false - } - if isPermanentError { - // stop the retry - return backoff.Permanent(retryErr) - } - - // Case 2. 
retry as much as possible - // TODO: compare with E_RAFT_BUFFER_OVERFLOW - // Can not get the E_RAFT_BUFFER_OVERFLOW inside storage now. - if strings.Contains(errorMsg, "raft buffer is full") { - retry = p.retry - return retryErr - } - } - // Case 3: retry with limit times - if retry <= 0 { - // stop the retry - return backoff.Permanent(retryErr) - } - retry-- - return retryErr - }, exp) - - if err != nil { - err = fmt.Errorf("Client %d fail to execute: %s, Error: %s", i, data.Stmt, err.Error()) - } else { - if !resp.IsSucceed() { - err = fmt.Errorf("Client %d fail to execute: %s, ErrMsg: %s, ErrCode: %v", i, data.Stmt, resp.GetErrorMsg(), resp.GetErrorCode()) - } - } - - if err != nil { - data.ErrCh <- base.ErrData{ - Error: err, - Data: data.Data, - } - } else { - timeInMs := time.Since(now).Nanoseconds() / 1e3 - var importedBytes int64 - for _, d := range data.Data { - importedBytes += int64(d.Bytes) - } - p.statsCh <- base.NewSuccessStats(int64(resp.GetLatency()), timeInMs, len(data.Data), importedBytes) - } - } -} diff --git a/pkg/client/errors.go b/pkg/client/errors.go new file mode 100644 index 00000000..6c7b2e7d --- /dev/null +++ b/pkg/client/errors.go @@ -0,0 +1,5 @@ +package client + +import stderrors "errors" + +var ErrClosed = stderrors.New("client closed") diff --git a/pkg/client/logger.go b/pkg/client/logger.go new file mode 100644 index 00000000..20a99e95 --- /dev/null +++ b/pkg/client/logger.go @@ -0,0 +1,27 @@ +package client + +import ( + nebula "github.com/vesoft-inc/nebula-go/v3" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" +) + +var _ nebula.Logger = nebulaLogger{} + +type nebulaLogger struct { + l logger.Logger +} + +func newNebulaLogger(l logger.Logger) nebula.Logger { + return nebulaLogger{ + l: l, + } +} + +//revive:disable:empty-lines + +func (l nebulaLogger) Info(msg string) { l.l.Info(msg) } +func (l nebulaLogger) Warn(msg string) { l.l.Warn(msg) } +func (l nebulaLogger) Error(msg string) { l.l.Error(msg) } +func (l nebulaLogger) Fatal(msg string) { l.l.Fatal(msg) } + +//revive:enable:empty-lines diff --git a/pkg/client/logger_test.go b/pkg/client/logger_test.go new file mode 100644 index 00000000..27d1b237 --- /dev/null +++ b/pkg/client/logger_test.go @@ -0,0 +1,17 @@ +package client + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + + . 
"github.com/onsi/ginkgo/v2" +) + +var _ = Describe("nebulaLogger", func() { + It("newNebulaLogger", func() { + l := newNebulaLogger(logger.NopLogger) + l.Info("") + l.Warn("") + l.Error("") + l.Fatal("") + }) +}) diff --git a/pkg/client/option.go b/pkg/client/option.go new file mode 100644 index 00000000..9a582195 --- /dev/null +++ b/pkg/client/option.go @@ -0,0 +1,180 @@ +package client + +import ( + "strings" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" +) + +const ( + DefaultUser = "root" + DefaultPassword = "nebula" + DefaultReconnectInitialInterval = time.Second + DefaultReconnectMaxInterval = 2 * time.Minute + DefaultRetry = 3 + DefaultRetryInitialInterval = time.Second + DefaultRetryMaxInterval = 2 * time.Minute + DefaultRetryRandomizationFactor = 0.1 + DefaultRetryMultiplier = 1.5 + DefaultRetryMaxElapsedTime = time.Hour + DefaultConcurrencyPerAddress = 10 + DefaultQueueSize = 1000 +) + +type ( + Option func(*options) + + options struct { + // for client + addresses []string + user string + password string + retry int + retryInitialInterval time.Duration + logger logger.Logger + fnNewSession NewSessionFunc + clientInitFunc func(Client) error + // for pool + reconnectInitialInterval time.Duration + concurrencyPerAddress int + queueSize int + fnNewClientWithOptions func(o *options) Client // for convenience of testing in Pool + } +) + +func WithV3() Option { + return func(c *options) { + WithNewSessionFunc(func(hostAddress HostAddress) Session { + return newSessionV3(hostAddress, c.user, c.password, c.logger) + })(c) + } +} + +func WithAddress(addresses ...string) Option { + return func(c *options) { + for _, addr := range addresses { + if strings.IndexByte(addr, ',') != -1 { + c.addresses = append(c.addresses, strings.Split(addr, ",")...) 
+ } else { + c.addresses = append(c.addresses, addr) + } + } + } +} + +func WithUser(user string) Option { + return func(c *options) { + c.user = user + } +} + +func WithPassword(password string) Option { + return func(c *options) { + c.password = password + } +} + +func WithUserPassword(user, password string) Option { + return func(c *options) { + WithUser(user)(c) + WithPassword(password)(c) + } +} + +func WithRetry(retry int) Option { + return func(c *options) { + if retry > 0 { + c.retry = retry + } + } +} + +func WithRetryInitialInterval(interval time.Duration) Option { + return func(c *options) { + if interval > 0 { + c.retryInitialInterval = interval + } + } +} + +func WithLogger(l logger.Logger) Option { + return func(m *options) { + m.logger = l + } +} + +func WithNewSessionFunc(fn NewSessionFunc) Option { + return func(m *options) { + m.fnNewSession = fn + } +} + +func WithClientInitFunc(fn func(Client) error) Option { + return func(c *options) { + c.clientInitFunc = fn + } +} + +func WithReconnectInitialInterval(interval time.Duration) Option { + return func(c *options) { + if interval > 0 { + c.reconnectInitialInterval = interval + } + } +} + +func WithConcurrencyPerAddress(concurrencyPerAddress int) Option { + return func(c *options) { + if concurrencyPerAddress > 0 { + c.concurrencyPerAddress = concurrencyPerAddress + } + } +} + +func WithQueueSize(queueSize int) Option { + return func(c *options) { + if queueSize > 0 { + c.queueSize = queueSize + } + } +} + +func newOptions(opts ...Option) *options { + var defaultOptions = &options{ + user: DefaultUser, + password: DefaultPassword, + reconnectInitialInterval: DefaultReconnectInitialInterval, + retry: DefaultRetry, + retryInitialInterval: DefaultRetryInitialInterval, + concurrencyPerAddress: DefaultConcurrencyPerAddress, + queueSize: DefaultQueueSize, + } + + defaultOptions.withOptions(opts...) + + return defaultOptions +} + +func (o *options) withOptions(opts ...Option) { + for _, opt := range opts { + opt(o) + } + + if o.logger == nil { + o.logger = logger.NopLogger + } + + if o.fnNewSession == nil { + WithV3()(o) + } + + if o.fnNewClientWithOptions == nil { + o.fnNewClientWithOptions = newClientWithOptions + } +} + +func (o *options) clone() *options { + cpy := *o + return &cpy +} diff --git a/pkg/client/option_test.go b/pkg/client/option_test.go new file mode 100644 index 00000000..ad11f018 --- /dev/null +++ b/pkg/client/option_test.go @@ -0,0 +1,131 @@ +package client + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Option", func() { + It("newOptions", func() { + o := newOptions() + Expect(o).NotTo(BeNil()) + + Expect(o.addresses).To(Equal([]string(nil))) + Expect(o.user).To(Equal(DefaultUser)) + Expect(o.password).To(Equal(DefaultPassword)) + Expect(o.retry).To(Equal(DefaultRetry)) + Expect(o.retryInitialInterval).To(Equal(DefaultRetryInitialInterval)) + Expect(o.logger).NotTo(BeNil()) + Expect(o.fnNewSession).NotTo(BeNil()) + Expect(o.clientInitFunc).To(BeNil()) + Expect(o.reconnectInitialInterval).To(Equal(DefaultReconnectInitialInterval)) + Expect(o.concurrencyPerAddress).To(Equal(DefaultConcurrencyPerAddress)) + Expect(o.queueSize).To(Equal(DefaultQueueSize)) + Expect(o.fnNewClientWithOptions).NotTo(BeNil()) + + o1 := o.clone() + Expect(o1.addresses).To(Equal([]string(nil))) + Expect(o1.user).To(Equal(DefaultUser)) + Expect(o1.password).To(Equal(DefaultPassword)) + Expect(o1.retry).To(Equal(DefaultRetry)) + Expect(o1.retryInitialInterval).To(Equal(DefaultRetryInitialInterval)) + Expect(o1.logger).NotTo(BeNil()) + Expect(o1.fnNewSession).NotTo(BeNil()) + Expect(o1.clientInitFunc).To(BeNil()) + Expect(o1.reconnectInitialInterval).To(Equal(DefaultReconnectInitialInterval)) + Expect(o1.concurrencyPerAddress).To(Equal(DefaultConcurrencyPerAddress)) + Expect(o1.queueSize).To(Equal(DefaultQueueSize)) + Expect(o.fnNewClientWithOptions).NotTo(BeNil()) + + o1.addresses = []string{"127.0.0.1:9669"} + Expect(o.addresses).To(Equal([]string(nil))) + Expect(o1.addresses).To(Equal([]string{"127.0.0.1:9669"})) + }) + + It("withXXX", func() { + o := newOptions( + WithV3(), + WithAddress("127.0.0.1:9669"), + WithAddress("127.0.0.2:9669,127.0.0.3:9669"), + WithAddress("127.0.0.4:9669,127.0.0.5:9669", "127.0.0.6:9669"), + WithUser("u0"), + WithPassword("p0"), + WithUserPassword("newUser", "newPassword"), + WithRetry(DefaultRetry-1), + WithRetry(DefaultRetry+1), + WithRetryInitialInterval(DefaultRetryInitialInterval-1), + WithRetryInitialInterval(DefaultRetryInitialInterval+1), + WithLogger(logger.NopLogger), + WithNewSessionFunc(func(HostAddress) Session { return nil }), + WithClientInitFunc(func(Client) error { return nil }), + WithReconnectInitialInterval(DefaultReconnectInitialInterval-1), + WithReconnectInitialInterval(DefaultReconnectInitialInterval+1), + WithConcurrencyPerAddress(DefaultConcurrencyPerAddress-1), + WithConcurrencyPerAddress(DefaultConcurrencyPerAddress+1), + WithQueueSize(DefaultQueueSize-1), + WithQueueSize(DefaultQueueSize+1), + ) + Expect(o).NotTo(BeNil()) + Expect(o.addresses).To(Equal([]string{ + "127.0.0.1:9669", + "127.0.0.2:9669", + "127.0.0.3:9669", + "127.0.0.4:9669", + "127.0.0.5:9669", + "127.0.0.6:9669", + })) + Expect(o.user).To(Equal("newUser")) + Expect(o.password).To(Equal("newPassword")) + Expect(o.retry).To(Equal(DefaultRetry + 1)) + Expect(o.retryInitialInterval).To(Equal(DefaultRetryInitialInterval + 1)) + Expect(o.logger).NotTo(BeNil()) + Expect(o.fnNewSession).NotTo(BeNil()) + Expect(o.clientInitFunc).NotTo(BeNil()) + Expect(o.reconnectInitialInterval).To(Equal(DefaultReconnectInitialInterval + 1)) + Expect(o.concurrencyPerAddress).To(Equal(DefaultConcurrencyPerAddress + 1)) + Expect(o.queueSize).To(Equal(DefaultQueueSize + 1)) + Expect(o.fnNewClientWithOptions).NotTo(BeNil()) + + o1 := o.clone() + Expect(o1).NotTo(BeNil()) + Expect(o1.addresses).To(Equal([]string{ + "127.0.0.1:9669", + "127.0.0.2:9669", + "127.0.0.3:9669", + "127.0.0.4:9669", + "127.0.0.5:9669", + "127.0.0.6:9669", + })) + 
Expect(o1.user).To(Equal("newUser")) + Expect(o1.password).To(Equal("newPassword")) + Expect(o1.retry).To(Equal(DefaultRetry + 1)) + Expect(o1.retryInitialInterval).To(Equal(DefaultRetryInitialInterval + 1)) + Expect(o1.logger).NotTo(BeNil()) + Expect(o1.fnNewSession).NotTo(BeNil()) + Expect(o1.clientInitFunc).NotTo(BeNil()) + Expect(o1.reconnectInitialInterval).To(Equal(DefaultReconnectInitialInterval + 1)) + Expect(o1.concurrencyPerAddress).To(Equal(DefaultConcurrencyPerAddress + 1)) + Expect(o1.queueSize).To(Equal(DefaultQueueSize + 1)) + Expect(o.fnNewClientWithOptions).NotTo(BeNil()) + + o1.addresses = []string{"127.0.0.1:9669"} + Expect(o.addresses).To(Equal([]string{ + "127.0.0.1:9669", + "127.0.0.2:9669", + "127.0.0.3:9669", + "127.0.0.4:9669", + "127.0.0.5:9669", + "127.0.0.6:9669", + })) + Expect(o1.addresses).To(Equal([]string{"127.0.0.1:9669"})) + }) + + It("fnNewSession v3", func() { + o := newOptions(WithV3()) + s := o.fnNewSession(HostAddress{}) + _, ok := s.(*defaultSessionV3) + Expect(ok).To(BeTrue()) + }) +}) diff --git a/pkg/client/pool.go b/pkg/client/pool.go new file mode 100644 index 00000000..4aa5df83 --- /dev/null +++ b/pkg/client/pool.go @@ -0,0 +1,210 @@ +//go:generate mockgen -source=pool.go -destination pool_mock.go -package client -aux_files github.com/vesoft-inc/nebula-importer/v4/pkg/client=client.go Pool +package client + +import ( + "sync" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + "github.com/cenkalti/backoff/v4" +) + +type ( + Pool interface { + Client + GetClient(opts ...Option) (Client, error) + ExecuteChan(statement string) (<-chan ExecuteResult, bool) + } + + defaultPool struct { + *options + chExecuteDataQueue chan executeData + lock sync.RWMutex + closed bool + done chan struct{} + wgSession sync.WaitGroup + wgStatementExecute sync.WaitGroup + } + + NewSessionFunc func(HostAddress) Session + + executeData struct { + statement string + ch chan<- ExecuteResult + } + + ExecuteResult struct { + Response Response + Err error + } +) + +func NewPool(opts ...Option) Pool { + p := &defaultPool{ + options: newOptions(opts...), + done: make(chan struct{}), + } + + p.chExecuteDataQueue = make(chan executeData, p.queueSize) + + return p +} + +func (p *defaultPool) GetClient(opts ...Option) (Client, error) { + if len(p.addresses) == 0 { + return nil, errors.ErrNoAddresses + } + return p.openClient(p.addresses[0], opts...) +} + +func (p *defaultPool) Open() error { + if len(p.addresses) == 0 { + return errors.ErrNoAddresses + } + + for _, address := range p.addresses { + // check if it can open successfully. 
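+		// Probing one client per address surfaces bad endpoints or
+		// credentials synchronously, so a misconfiguration fails Open()
+		// instead of spinning indefinitely in the workers' reconnect backoff.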
+ c, err := p.openClient(address) + if err != nil { + return err + } + _ = c.Close() + } + + p.startWorkers() + + return nil +} + +func (p *defaultPool) Execute(statement string) (Response, error) { + if p.IsClosed() { + return nil, ErrClosed + } + p.wgStatementExecute.Add(1) + defer p.wgStatementExecute.Done() + + ch := make(chan ExecuteResult, 1) + data := executeData{ + statement: statement, + ch: ch, + } + p.chExecuteDataQueue <- data + result := <-ch + return result.Response, result.Err +} + +func (p *defaultPool) ExecuteChan(statement string) (<-chan ExecuteResult, bool) { + if p.IsClosed() { + return nil, false + } + p.wgStatementExecute.Add(1) + defer p.wgStatementExecute.Done() + + ch := make(chan ExecuteResult, 1) + data := executeData{ + statement: statement, + ch: ch, + } + select { + case p.chExecuteDataQueue <- data: + return ch, true + default: + return nil, false + } +} + +func (p *defaultPool) Close() error { + p.lock.Lock() + p.closed = true + p.lock.Unlock() + + p.wgStatementExecute.Wait() + close(p.done) + p.wgSession.Wait() + close(p.chExecuteDataQueue) + return nil +} + +func (p *defaultPool) IsClosed() bool { + p.lock.RLock() + defer p.lock.RUnlock() + return p.closed +} + +func (p *defaultPool) startWorkers() { + for _, address := range p.addresses { + address := address + for i := 0; i < p.concurrencyPerAddress; i++ { + p.wgSession.Add(1) + go func() { + defer p.wgSession.Done() + p.worker(address) + }() + } + } +} + +func (p *defaultPool) worker(address string) { + for { + select { + case <-p.done: + return + default: + exp := backoff.NewExponentialBackOff() + exp.InitialInterval = p.reconnectInitialInterval + exp.MaxInterval = DefaultReconnectMaxInterval + exp.RandomizationFactor = DefaultRetryRandomizationFactor + exp.Multiplier = DefaultRetryMultiplier + + var ( + err error + c Client + ) + _ = backoff.Retry(func() error { + c, err = p.openClient(address) + if err != nil { + p.logger.WithError(err).Error("open client failed") + } + return err + }, exp) + + if err == nil { + p.loop(c) + } + } + } +} + +func (p *defaultPool) openClient(address string, opts ...Option) (Client, error) { + cloneOptions := p.options.clone() + cloneOptions.addresses = []string{address} + cloneOptions.withOptions(opts...) + + c := p.fnNewClientWithOptions(cloneOptions) + if err := c.Open(); err != nil { + return nil, err + } + + return c, nil +} + +func (p *defaultPool) loop(c Client) { + defer func() { + _ = c.Close() + }() + for { + select { + case data, ok := <-p.chExecuteDataQueue: + if !ok { + continue + } + resp, err := c.Execute(data.statement) + data.ch <- ExecuteResult{ + Response: resp, + Err: err, + } + case <-p.done: + return + } + } +} diff --git a/pkg/client/pool_mock.go b/pkg/client/pool_mock.go new file mode 100644 index 00000000..a5a3ebbf --- /dev/null +++ b/pkg/client/pool_mock.go @@ -0,0 +1,111 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: pool.go + +// Package client is a generated GoMock package. +package client + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockPool is a mock of Pool interface. +type MockPool struct { + ctrl *gomock.Controller + recorder *MockPoolMockRecorder +} + +// MockPoolMockRecorder is the mock recorder for MockPool. +type MockPoolMockRecorder struct { + mock *MockPool +} + +// NewMockPool creates a new mock instance. 
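For orientation, a minimal sketch of driving the new pool API above end to end (the addresses and statement are illustrative, and error handling is elided; this is not part of the patch):

```go
package main

import "github.com/vesoft-inc/nebula-importer/v4/pkg/client"

func main() {
	pool := client.NewPool(
		client.WithAddress("127.0.0.1:9669,127.0.0.2:9669"), // comma lists are split by WithAddress
		client.WithUserPassword("root", "nebula"),
		client.WithConcurrencyPerAddress(4), // 4 worker sessions per address
	)
	if err := pool.Open(); err != nil { // pre-flight check, then worker startup
		panic(err)
	}
	defer func() { _ = pool.Close() }()

	// Blocking path: waits for a worker to pick up and execute the statement.
	resp, err := pool.Execute("SHOW HOSTS;")
	_, _ = resp, err

	// Non-blocking path: ok is false when the queue is full or the pool is closed.
	if ch, ok := pool.ExecuteChan("SHOW HOSTS;"); ok {
		result := <-ch
		_, _ = result.Response, result.Err
	}
}
```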
+func NewMockPool(ctrl *gomock.Controller) *MockPool { + mock := &MockPool{ctrl: ctrl} + mock.recorder = &MockPoolMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPool) EXPECT() *MockPoolMockRecorder { + return m.recorder +} + +// Close mocks base method. +func (m *MockPool) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockPoolMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockPool)(nil).Close)) +} + +// Execute mocks base method. +func (m *MockPool) Execute(statement string) (Response, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Execute", statement) + ret0, _ := ret[0].(Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Execute indicates an expected call of Execute. +func (mr *MockPoolMockRecorder) Execute(statement interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Execute", reflect.TypeOf((*MockPool)(nil).Execute), statement) +} + +// ExecuteChan mocks base method. +func (m *MockPool) ExecuteChan(statement string) (<-chan ExecuteResult, bool) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ExecuteChan", statement) + ret0, _ := ret[0].(<-chan ExecuteResult) + ret1, _ := ret[1].(bool) + return ret0, ret1 +} + +// ExecuteChan indicates an expected call of ExecuteChan. +func (mr *MockPoolMockRecorder) ExecuteChan(statement interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ExecuteChan", reflect.TypeOf((*MockPool)(nil).ExecuteChan), statement) +} + +// GetClient mocks base method. +func (m *MockPool) GetClient(opts ...Option) (Client, error) { + m.ctrl.T.Helper() + varargs := []interface{}{} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetClient", varargs...) + ret0, _ := ret[0].(Client) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetClient indicates an expected call of GetClient. +func (mr *MockPoolMockRecorder) GetClient(opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClient", reflect.TypeOf((*MockPool)(nil).GetClient), opts...) +} + +// Open mocks base method. +func (m *MockPool) Open() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Open") + ret0, _ := ret[0].(error) + return ret0 +} + +// Open indicates an expected call of Open. +func (mr *MockPoolMockRecorder) Open() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockPool)(nil).Open)) +} diff --git a/pkg/client/pool_test.go b/pkg/client/pool_test.go new file mode 100644 index 00000000..ec7c7ec9 --- /dev/null +++ b/pkg/client/pool_test.go @@ -0,0 +1,383 @@ +package client + +import ( + stderrors "errors" + "sync" + "sync/atomic" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + "github.com/golang/mock/gomock" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Pool", func() { + It("NewPool", func() { + p := NewPool(WithAddress("127.0.0.1:9669")) + p1, ok := p.(*defaultPool) + Expect(ok).To(BeTrue()) + Expect(p1).NotTo(BeNil()) + Expect(p1.addresses).To(Equal([]string{"127.0.0.1:9669"})) + Expect(p1.done).NotTo(BeNil()) + Expect(p1.chExecuteDataQueue).NotTo(BeNil()) + }) + + Describe(".GetClient", func() { + var ( + ctrl *gomock.Controller + mockClient *MockClient + ) + + BeforeEach(func() { + ctrl = gomock.NewController(GinkgoT()) + mockClient = NewMockClient(ctrl) + }) + + AfterEach(func() { + ctrl.Finish() + }) + + It("no addresses", func() { + p := NewPool() + c, err := p.GetClient() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(errors.ErrNoAddresses)) + Expect(c).To(BeNil()) + }) + + It("open client failed", func() { + mockClient.EXPECT().Open().Return(stderrors.New("open client failed")) + + p := NewPool( + WithAddress("127.0.0.1:9669", "127.0.0.2:9669"), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + Expect(o.addresses).To(Equal([]string{"127.0.0.1:9669"})) + return mockClient + } + }, + ) + c, err := p.GetClient() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(stderrors.New("open client failed"))) + Expect(c).To(BeNil()) + }) + + It("successfully", func() { + mockClient.EXPECT().Open().Return(nil) + + p := NewPool( + WithAddress("127.0.0.1:9669", "127.0.0.2:9669"), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + Expect(o.addresses).To(Equal([]string{"127.0.0.1:9669"})) + return mockClient + } + }, + ) + c, err := p.GetClient() + Expect(err).NotTo(HaveOccurred()) + Expect(c).NotTo(BeNil()) + }) + }) + + Describe(".Open", func() { + var ( + ctrl *gomock.Controller + mockClient *MockClient + ) + + BeforeEach(func() { + ctrl = gomock.NewController(GinkgoT()) + mockClient = NewMockClient(ctrl) + }) + + AfterEach(func() { + ctrl.Finish() + }) + + It("no addresses", func() { + pool := NewPool() + err := pool.Open() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(errors.ErrNoAddresses)) + }) + + It("open Client failed", func() { + pool := NewPool( + WithAddress("127.0.0.1:9669"), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + return mockClient + } + }, + ) + + mockClient.EXPECT().Open().Return(stderrors.New("open client failed")) + + err := pool.Open() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(stderrors.New("open client failed"))) + }) + + It("start workers successfully", func() { + addresses := []string{"127.0.0.1:9669", "127.0.0.2:9669"} + pool := NewPool( + WithAddress(addresses...), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + return mockClient + } + }, + ) + + var ( + // 1 for check and DefaultConcurrencyPerAddress for concurrency per address + clientOpenTimes = (1 + DefaultConcurrencyPerAddress) * len(addresses) + wg sync.WaitGroup + ) + + wg.Add(clientOpenTimes) + mockClient.EXPECT().Open().Times(clientOpenTimes).DoAndReturn(func() error { + defer wg.Done() + return nil + }) + mockClient.EXPECT().Close().Times(clientOpenTimes).Return(nil) + + err := pool.Open() + Expect(err).NotTo(HaveOccurred()) + + wg.Wait() + + err = pool.Close() + Expect(err).NotTo(HaveOccurred()) + + resp, err := pool.Execute("test Execute statement") + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(ErrClosed)) + Expect(resp).To(BeNil()) + + chExecuteResult, ok := pool.ExecuteChan("test ExecuteChan statement") + Expect(ok).To(BeFalse()) + 
Expect(chExecuteResult).To(BeNil()) + }) + + It("start workers temporary failure", func() { + addresses := []string{"127.0.0.1:9669", "127.0.0.2:9669"} + pool := NewPool( + WithAddress(addresses...), + WithReconnectInitialInterval(time.Nanosecond), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + return mockClient + } + }, + ) + + var ( + // 1 for check and DefaultConcurrencyPerAddress for concurrency per address + clientOpenTimes = (1 + DefaultConcurrencyPerAddress) * len(addresses) + wg sync.WaitGroup + openTimes int64 + failedOpenTimes = 10 + ) + + wg.Add(clientOpenTimes + failedOpenTimes) + fnOpen := func() error { + defer wg.Done() + curr := atomic.AddInt64(&openTimes, 1) + if curr >= int64(1+DefaultConcurrencyPerAddress)+1 && + curr < int64(1+DefaultConcurrencyPerAddress)+1+int64(failedOpenTimes) { + return stderrors.New("test start worker temporary failure") + } + return nil + } + + mockClient.EXPECT().Open().Times(clientOpenTimes + failedOpenTimes).DoAndReturn(fnOpen) + mockClient.EXPECT().Close().Times(clientOpenTimes).Return(nil) + + err := pool.Open() + Expect(err).NotTo(HaveOccurred()) + + wg.Wait() + + err = pool.Close() + Expect(err).NotTo(HaveOccurred()) + }) + }) + + Describe(".Execute&.ExecuteChan", func() { + var ( + ctrl *gomock.Controller + mockClient *MockClient + mockResponse *MockResponse + ) + + BeforeEach(func() { + ctrl = gomock.NewController(GinkgoT()) + mockClient = NewMockClient(ctrl) + mockResponse = NewMockResponse(ctrl) + }) + + AfterEach(func() { + ctrl.Finish() + }) + + It("blocked at ExecuteChan", func() { + addresses := []string{"127.0.0.1:9669"} + pool := NewPool( + WithAddress(addresses...), + WithConcurrencyPerAddress(1), + WithQueueSize(1), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + return mockClient + } + }, + ) + + var ( + // 1 for check and DefaultConcurrencyPerAddress for concurrency per address + clientOpenTimes = (1 + 1) * len(addresses) + wg sync.WaitGroup + wait = make(chan struct{}) + ) + + wg.Add(clientOpenTimes) + fnExecute := func(_ string) (Response, error) { + <-wait + return mockResponse, nil + } + + mockClient.EXPECT().Open().Times(clientOpenTimes).DoAndReturn(func() error { + defer wg.Done() + return nil + }) + mockClient.EXPECT().Execute("test ExecuteChan statement").Times(1).DoAndReturn(fnExecute) + mockClient.EXPECT().Close().Times(clientOpenTimes).Return(nil) + + err := pool.Open() + Expect(err).NotTo(HaveOccurred()) + + chExecuteResult, ok := pool.ExecuteChan("test ExecuteChan statement") + Expect(ok).To(BeTrue()) + Expect(chExecuteResult).NotTo(BeNil()) + + chExecuteResult, ok = pool.ExecuteChan("test ExecuteChan statement") + Expect(ok).To(BeFalse()) + Expect(chExecuteResult).To(BeNil()) + + close(wait) + + wg.Wait() + + err = pool.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("chExecuteDataQueue closed", func() { + addresses := []string{"127.0.0.1:9669", "127.0.0.2:9669"} + pool := NewPool( + WithAddress(addresses...), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + return mockClient + } + }, + ) + + var ( + // 1 for check and DefaultConcurrencyPerAddress for concurrency per address + clientOpenTimes = (1 + DefaultConcurrencyPerAddress) * len(addresses) + wg sync.WaitGroup + ) + + wg.Add(clientOpenTimes) + + mockClient.EXPECT().Open().Times(clientOpenTimes).DoAndReturn(func() error { + defer wg.Done() + return nil + }) + mockClient.EXPECT().Close().Times(clientOpenTimes).Return(nil) + + err := pool.Open() + 
Expect(err).NotTo(HaveOccurred()) + + pool1 := pool.(*defaultPool) + close(pool1.chExecuteDataQueue) + + wg.Wait() + + close(pool1.done) + pool1.wgSession.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("concurrency", func() { + var ( + addresses = []string{"127.0.0.1:9669", "127.0.0.2:9669"} + executeTimes = 1000 + ) + + pool := NewPool( + WithAddress(addresses...), + WithQueueSize(executeTimes*2), + func(o *options) { + o.fnNewClientWithOptions = func(o *options) Client { + return mockClient + } + }, + ) + + var ( + // 1 for check and DefaultConcurrencyPerAddress for concurrency per address + clientOpenTimes = (1 + DefaultConcurrencyPerAddress) * len(addresses) + wg sync.WaitGroup + ) + wg.Add(clientOpenTimes) + + mockClient.EXPECT().Open().Times(clientOpenTimes).DoAndReturn(func() error { + defer wg.Done() + return nil + }) + mockClient.EXPECT().Execute("test Execute statement").Times(executeTimes).Return(mockResponse, nil) + mockClient.EXPECT().Execute("test ExecuteChan statement").Times(executeTimes).Return(mockResponse, nil) + mockClient.EXPECT().Close().Times(clientOpenTimes).Return(nil) + + err := pool.Open() + Expect(err).NotTo(HaveOccurred()) + + var wgExecutes sync.WaitGroup + for i := 0; i < executeTimes; i++ { + wgExecutes.Add(1) + go func() { + defer GinkgoRecover() + defer wgExecutes.Done() + resp, err := pool.Execute("test Execute statement") + Expect(err).NotTo(HaveOccurred()) + Expect(resp).NotTo(BeNil()) + }() + + wgExecutes.Add(1) + go func() { + defer GinkgoRecover() + defer wgExecutes.Done() + chExecuteResult, ok := pool.ExecuteChan("test ExecuteChan statement") + Expect(ok).To(BeTrue()) + executeResult := <-chExecuteResult + resp, err := executeResult.Response, executeResult.Err + Expect(err).NotTo(HaveOccurred()) + Expect(resp).NotTo(BeNil()) + }() + } + wgExecutes.Wait() + + wg.Wait() + + err = pool.Close() + Expect(err).NotTo(HaveOccurred()) + }) + }) +}) diff --git a/pkg/client/response.go b/pkg/client/response.go new file mode 100644 index 00000000..ce88eafb --- /dev/null +++ b/pkg/client/response.go @@ -0,0 +1,15 @@ +//go:generate mockgen -source=response.go -destination response_mock.go -package client Response +package client + +import ( + "time" +) + +type Response interface { + IsSucceed() bool + GetLatency() time.Duration + GetRespTime() time.Duration + GetError() error + IsPermanentError() bool + IsRetryMoreError() bool +} diff --git a/pkg/client/response_mock.go b/pkg/client/response_mock.go new file mode 100644 index 00000000..cd2c547c --- /dev/null +++ b/pkg/client/response_mock.go @@ -0,0 +1,119 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: response.go + +// Package client is a generated GoMock package. +package client + +import ( + reflect "reflect" + time "time" + + gomock "github.com/golang/mock/gomock" +) + +// MockResponse is a mock of Response interface. +type MockResponse struct { + ctrl *gomock.Controller + recorder *MockResponseMockRecorder +} + +// MockResponseMockRecorder is the mock recorder for MockResponse. +type MockResponseMockRecorder struct { + mock *MockResponse +} + +// NewMockResponse creates a new mock instance. +func NewMockResponse(ctrl *gomock.Controller) *MockResponse { + mock := &MockResponse{ctrl: ctrl} + mock.recorder = &MockResponseMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockResponse) EXPECT() *MockResponseMockRecorder { + return m.recorder +} + +// GetError mocks base method. 
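The Response interface above is the contract the retry logic keys off: IsPermanentError short-circuits retries, while IsRetryMoreError flags overload conditions that deserve extra attempts. A hedged sketch of the intended classification, assuming `resp` came from Execute:

```go
// Sketch only: how a caller is expected to branch on a Response.
switch {
case resp.IsSucceed():
	// Done. GetLatency() is server-side time, GetRespTime() the client-observed
	// round trip; the difference is queueing and network overhead.
case resp.IsPermanentError():
	// e.g. syntax or semantic errors: retrying the same statement cannot help.
default:
	// Transient failure; IsRetryMoreError() hints the normal retry limit
	// should not apply (e.g. "raft buffer is full", see response_v3.go).
}
```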
+func (m *MockResponse) GetError() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetError") + ret0, _ := ret[0].(error) + return ret0 +} + +// GetError indicates an expected call of GetError. +func (mr *MockResponseMockRecorder) GetError() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetError", reflect.TypeOf((*MockResponse)(nil).GetError)) +} + +// GetLatency mocks base method. +func (m *MockResponse) GetLatency() time.Duration { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLatency") + ret0, _ := ret[0].(time.Duration) + return ret0 +} + +// GetLatency indicates an expected call of GetLatency. +func (mr *MockResponseMockRecorder) GetLatency() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatency", reflect.TypeOf((*MockResponse)(nil).GetLatency)) +} + +// GetRespTime mocks base method. +func (m *MockResponse) GetRespTime() time.Duration { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRespTime") + ret0, _ := ret[0].(time.Duration) + return ret0 +} + +// GetRespTime indicates an expected call of GetRespTime. +func (mr *MockResponseMockRecorder) GetRespTime() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRespTime", reflect.TypeOf((*MockResponse)(nil).GetRespTime)) +} + +// IsPermanentError mocks base method. +func (m *MockResponse) IsPermanentError() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsPermanentError") + ret0, _ := ret[0].(bool) + return ret0 +} + +// IsPermanentError indicates an expected call of IsPermanentError. +func (mr *MockResponseMockRecorder) IsPermanentError() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsPermanentError", reflect.TypeOf((*MockResponse)(nil).IsPermanentError)) +} + +// IsRetryMoreError mocks base method. +func (m *MockResponse) IsRetryMoreError() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsRetryMoreError") + ret0, _ := ret[0].(bool) + return ret0 +} + +// IsRetryMoreError indicates an expected call of IsRetryMoreError. +func (mr *MockResponseMockRecorder) IsRetryMoreError() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsRetryMoreError", reflect.TypeOf((*MockResponse)(nil).IsRetryMoreError)) +} + +// IsSucceed mocks base method. +func (m *MockResponse) IsSucceed() bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsSucceed") + ret0, _ := ret[0].(bool) + return ret0 +} + +// IsSucceed indicates an expected call of IsSucceed. 
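For reference, the generated mocks are consumed in the suites below roughly like this (a sketch mirroring the pattern in pool_test.go; GinkgoT comes from ginkgo v2):

```go
ctrl := gomock.NewController(GinkgoT())
defer ctrl.Finish()

mockResponse := NewMockResponse(ctrl)
mockResponse.EXPECT().IsSucceed().AnyTimes().Return(true)
mockResponse.EXPECT().GetLatency().AnyTimes().Return(time.Microsecond)
```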
+func (mr *MockResponseMockRecorder) IsSucceed() *gomock.Call {
+	mr.mock.ctrl.T.Helper()
+	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsSucceed", reflect.TypeOf((*MockResponse)(nil).IsSucceed))
+}
diff --git a/pkg/client/response_v3.go b/pkg/client/response_v3.go
new file mode 100644
index 00000000..1dd69755
--- /dev/null
+++ b/pkg/client/response_v3.go
@@ -0,0 +1,55 @@
+package client
+
+import (
+	"fmt"
+	"strings"
+	"time"
+
+	nebula "github.com/vesoft-inc/nebula-go/v3"
+)
+
+type defaultResponseV3 struct {
+	*nebula.ResultSet
+	respTime time.Duration
+}
+
+func newResponseV3(rs *nebula.ResultSet, respTime time.Duration) Response {
+	return defaultResponseV3{
+		ResultSet: rs,
+		respTime:  respTime,
+	}
+}
+
+func (resp defaultResponseV3) GetLatency() time.Duration {
+	return time.Duration(resp.ResultSet.GetLatency()) * time.Microsecond
+}
+
+func (resp defaultResponseV3) GetRespTime() time.Duration {
+	return resp.respTime
+}
+
+func (resp defaultResponseV3) GetError() error {
+	if resp.ResultSet.IsSucceed() {
+		return nil
+	}
+	errorCode := resp.ResultSet.GetErrorCode()
+	errorMsg := resp.ResultSet.GetErrorMsg()
+	return fmt.Errorf("%d:%s", errorCode, errorMsg)
+}
+
+func (resp defaultResponseV3) IsPermanentError() bool {
+	switch resp.ResultSet.GetErrorCode() { //nolint:exhaustive
+	case nebula.ErrorCode_E_SYNTAX_ERROR,
+		nebula.ErrorCode_E_SEMANTIC_ERROR:
+		return true
+	default:
+		return false
+	}
+}
+
+func (resp defaultResponseV3) IsRetryMoreError() bool {
+	errorMsg := resp.ResultSet.GetErrorMsg()
+	// TODO: compare with E_RAFT_BUFFER_OVERFLOW once storage exposes it;
+	// the error code cannot be obtained from storage yet, only the message.
+	return strings.Contains(errorMsg, "raft buffer is full")
+}
diff --git a/pkg/client/response_v3_test.go b/pkg/client/response_v3_test.go
new file mode 100644
index 00000000..e81d4be8
--- /dev/null
+++ b/pkg/client/response_v3_test.go
@@ -0,0 +1,78 @@
+//go:build linux
+
+package client
+
+import (
+	"time"
+
+	nebula "github.com/vesoft-inc/nebula-go/v3"
+
+	"github.com/agiledragon/gomonkey/v2"
+	. "github.com/onsi/ginkgo/v2"
+	. 
"github.com/onsi/gomega" +) + +var _ = Describe("defaultResponseV3", func() { + It("newResponseV3", func() { + patches := gomonkey.NewPatches() + defer patches.Reset() + + rs := nebula.ResultSet{} + resp := newResponseV3(&rs, time.Second) + + patches.ApplyMethodReturn(rs, "GetErrorCode", nebula.ErrorCode_SUCCEEDED) + patches.ApplyMethodReturn(rs, "GetErrorMsg", "") + + err := resp.GetError() + Expect(err).NotTo(HaveOccurred()) + + patches.Reset() + + patches.ApplyMethodReturn(rs, "GetLatency", int64(1)) + patches.ApplyMethodReturn(rs, "GetErrorCode", nebula.ErrorCode_E_DISCONNECTED) + patches.ApplyMethodReturn(rs, "GetErrorMsg", "test msg") + + err = resp.GetError() + Expect(err).To(HaveOccurred()) + + Expect(resp.GetLatency()).To(Equal(time.Microsecond)) + Expect(resp.GetRespTime()).To(Equal(time.Second)) + Expect(resp.IsPermanentError()).To(BeFalse()) + Expect(resp.IsRetryMoreError()).To(BeFalse()) + }) + + DescribeTable("IsPermanentError", + func(errorCode nebula.ErrorCode, isPermanentError bool) { + patches := gomonkey.NewPatches() + defer patches.Reset() + + rs := nebula.ResultSet{} + resp := newResponseV3(&rs, time.Second) + + patches.ApplyMethodReturn(rs, "GetErrorCode", errorCode) + + Expect(resp.IsPermanentError()).To(Equal(isPermanentError)) + }, + EntryDescription("%[1]s -> %[2]t"), + Entry(nil, nebula.ErrorCode_E_SYNTAX_ERROR, true), + Entry(nil, nebula.ErrorCode_E_SEMANTIC_ERROR, true), + Entry(nil, nebula.ErrorCode_E_DISCONNECTED, false), + ) + + DescribeTable("IsPermanentError", + func(errorMsg string, isPermanentError bool) { + patches := gomonkey.NewPatches() + defer patches.Reset() + + rs := nebula.ResultSet{} + resp := newResponseV3(&rs, time.Second) + + patches.ApplyMethodReturn(rs, "GetErrorMsg", errorMsg) + + Expect(resp.IsRetryMoreError()).To(Equal(isPermanentError)) + }, + EntryDescription("%[1]s -> %[2]t"), + Entry(nil, "x raft buffer is full x", true), + Entry(nil, "x x", false), + ) +}) diff --git a/pkg/client/session.go b/pkg/client/session.go new file mode 100644 index 00000000..fdb8f434 --- /dev/null +++ b/pkg/client/session.go @@ -0,0 +1,8 @@ +//go:generate mockgen -source=session.go -destination session_mock.go -package client Session +package client + +type Session interface { + Open() error + Execute(statement string) (Response, error) + Close() error +} diff --git a/pkg/client/session_mock.go b/pkg/client/session_mock.go new file mode 100644 index 00000000..e5986c47 --- /dev/null +++ b/pkg/client/session_mock.go @@ -0,0 +1,77 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: session.go + +// Package client is a generated GoMock package. +package client + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockSession is a mock of Session interface. +type MockSession struct { + ctrl *gomock.Controller + recorder *MockSessionMockRecorder +} + +// MockSessionMockRecorder is the mock recorder for MockSession. +type MockSessionMockRecorder struct { + mock *MockSession +} + +// NewMockSession creates a new mock instance. +func NewMockSession(ctrl *gomock.Controller) *MockSession { + mock := &MockSession{ctrl: ctrl} + mock.recorder = &MockSessionMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockSession) EXPECT() *MockSessionMockRecorder { + return m.recorder +} + +// Close mocks base method. 
+func (m *MockSession) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockSessionMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockSession)(nil).Close)) +} + +// Execute mocks base method. +func (m *MockSession) Execute(statement string) (Response, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Execute", statement) + ret0, _ := ret[0].(Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Execute indicates an expected call of Execute. +func (mr *MockSessionMockRecorder) Execute(statement interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Execute", reflect.TypeOf((*MockSession)(nil).Execute), statement) +} + +// Open mocks base method. +func (m *MockSession) Open() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Open") + ret0, _ := ret[0].(error) + return ret0 +} + +// Open indicates an expected call of Open. +func (mr *MockSessionMockRecorder) Open() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockSession)(nil).Open)) +} diff --git a/pkg/client/session_v3.go b/pkg/client/session_v3.go new file mode 100644 index 00000000..76a3e388 --- /dev/null +++ b/pkg/client/session_v3.go @@ -0,0 +1,75 @@ +//go:generate mockgen -source=session.go -destination session_mock.go -package client Session +package client + +import ( + "fmt" + "time" + + nebula "github.com/vesoft-inc/nebula-go/v3" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" +) + +type ( + defaultSessionV3 struct { + session *nebula.Session + hostAddress nebula.HostAddress + user string + password string + logger logger.Logger + } +) + +func newSessionV3(hostAddress HostAddress, user, password string, l logger.Logger) Session { + if l == nil { + l = logger.NopLogger + } + return &defaultSessionV3{ + hostAddress: nebula.HostAddress{ + Host: hostAddress.Host, + Port: hostAddress.Port, + }, + user: user, + password: password, + logger: l, + } +} + +func (s *defaultSessionV3) Open() error { + hostAddress := s.hostAddress + pool, err := nebula.NewConnectionPool( + []nebula.HostAddress{hostAddress}, + nebula.PoolConfig{ + MaxConnPoolSize: 1, + }, + newNebulaLogger(s.logger.With(logger.Field{ + Key: "address", + Value: fmt.Sprintf("%s:%d", hostAddress.Host, hostAddress.Port), + })), + ) + if err != nil { + return err + } + + session, err := pool.GetSession(s.user, s.password) + if err != nil { + return err + } + + s.session = session + + return nil +} + +func (s *defaultSessionV3) Execute(statement string) (Response, error) { + startTime := time.Now() + rs, err := s.session.Execute(statement) + if err != nil { + return nil, err + } + return newResponseV3(rs, time.Since(startTime)), nil +} + +func (s *defaultSessionV3) Close() error { + s.session.Release() + return nil +} diff --git a/pkg/client/session_v3_test.go b/pkg/client/session_v3_test.go new file mode 100644 index 00000000..b441b7ec --- /dev/null +++ b/pkg/client/session_v3_test.go @@ -0,0 +1,79 @@ +//go:build linux + +package client + +import ( + stderrors "errors" + + nebula "github.com/vesoft-inc/nebula-go/v3" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + + "github.com/agiledragon/gomonkey/v2" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("SessionV3", func() { + It("success", func() { + session := newSessionV3(HostAddress{}, "user", "password", nil) + pool := &nebula.ConnectionPool{} + nSession := &nebula.Session{} + + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFuncReturn(nebula.NewConnectionPool, pool, nil) + patches.ApplyMethodReturn(pool, "GetSession", nSession, nil) + + patches.ApplyMethodReturn(nSession, "Execute", &nebula.ResultSet{}, nil) + patches.ApplyMethodReturn(nSession, "Release") + + err := session.Open() + Expect(err).NotTo(HaveOccurred()) + resp, err := session.Execute("") + Expect(err).NotTo(HaveOccurred()) + Expect(resp).NotTo(BeNil()) + + err = session.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("failed", func() { + session := newSessionV3(HostAddress{}, "user", "password", logger.NopLogger) + pool := &nebula.ConnectionPool{} + nSession := &nebula.Session{} + + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFuncReturn(nebula.NewConnectionPool, nil, stderrors.New("new connection pool failed")) + + err := session.Open() + Expect(err).To(HaveOccurred()) + + patches.Reset() + + patches.ApplyFuncReturn(nebula.NewConnectionPool, pool, nil) + patches.ApplyMethodReturn(pool, "GetSession", nil, stderrors.New("get session failed")) + + err = session.Open() + Expect(err).To(HaveOccurred()) + + patches.Reset() + + patches.ApplyFuncReturn(nebula.NewConnectionPool, pool, nil) + patches.ApplyMethodReturn(pool, "GetSession", nSession, nil) + + patches.ApplyMethodReturn(nSession, "Execute", nil, stderrors.New("execute failed")) + patches.ApplyMethodReturn(nSession, "Release") + + err = session.Open() + Expect(err).NotTo(HaveOccurred()) + resp, err := session.Execute("") + Expect(err).To(HaveOccurred()) + Expect(resp).To(BeNil()) + + err = session.Close() + Expect(err).NotTo(HaveOccurred()) + }) +}) diff --git a/pkg/cmd/common/io.go b/pkg/cmd/common/io.go new file mode 100644 index 00000000..0ae61a1c --- /dev/null +++ b/pkg/cmd/common/io.go @@ -0,0 +1,9 @@ +package common + +import "io" + +type IOStreams struct { + In io.Reader + Out io.Writer + ErrOut io.Writer +} diff --git a/pkg/cmd/nebula-importer.go b/pkg/cmd/nebula-importer.go new file mode 100644 index 00000000..c0fc0e52 --- /dev/null +++ b/pkg/cmd/nebula-importer.go @@ -0,0 +1,124 @@ +package cmd + +import ( + "os" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/cmd/common" + "github.com/vesoft-inc/nebula-importer/v4/pkg/config" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" + + "github.com/spf13/cobra" +) + +type ( + ImporterOptions struct { + common.IOStreams + Arguments []string + ConfigFile string + cfg config.Configurator + logger logger.Logger + useNopLogger bool // for test + pool client.Pool + mgr manager.Manager + } +) + +func NewImporterOptions(streams common.IOStreams) *ImporterOptions { + return &ImporterOptions{ + IOStreams: streams, + } +} + +func NewDefaultImporterCommand() *cobra.Command { + o := NewImporterOptions(common.IOStreams{ + In: os.Stdin, + Out: os.Stdout, + ErrOut: os.Stderr, + }) + return NewImporterCommand(o) +} + +func NewImporterCommand(o *ImporterOptions) *cobra.Command { + cmd := &cobra.Command{ + Use: "nebula-importer", + Short: `The NebulaGraph Importer Tool.`, + RunE: func(cmd *cobra.Command, args []string) (err error) { + defer func() { 
+ if err != nil { + l := o.logger + + if l == nil || o.useNopLogger { + l = logger.NopLogger + } + + e := errors.NewImportError(err) + fields := logger.MapToFields(e.Fields()) + l.SkipCaller(1).WithError(e.Cause()).Error("failed to execute", fields...) + } + if o.pool != nil { + _ = o.pool.Close() + } + if o.logger != nil { + _ = o.logger.Sync() + _ = o.logger.Close() + } + }() + err = o.Complete(cmd, args) + if err != nil { + return err + } + err = o.Validate() + if err != nil { + return err + } + return o.Run(cmd, args) + }, + } + o.AddFlags(cmd) + return cmd +} + +func (*ImporterOptions) Complete(_ *cobra.Command, _ []string) error { + return nil +} + +func (o *ImporterOptions) Validate() error { + cfg, err := config.FromFile(o.ConfigFile) + if err != nil { + return err + } + + if err = cfg.Optimize(o.ConfigFile); err != nil { + return err + } + + if err = cfg.Build(); err != nil { + return err + } + + o.cfg = cfg + o.logger = cfg.GetLogger() + o.pool = cfg.GetClientPool() + o.mgr = cfg.GetManager() + + return nil +} + +func (o *ImporterOptions) Run(_ *cobra.Command, _ []string) error { + if err := o.mgr.Start(); err != nil { + return err + } + //revive:disable-next-line:if-return + if err := o.mgr.Wait(); err != nil { + return err + } + return nil +} + +func (o *ImporterOptions) AddFlags(cmd *cobra.Command) { + cmd.Flags().StringVarP(&o.ConfigFile, "config", "c", o.ConfigFile, + "specify nebula-importer configure file") +} diff --git a/pkg/cmd/nebula-importer_suite_test.go b/pkg/cmd/nebula-importer_suite_test.go new file mode 100644 index 00000000..45805152 --- /dev/null +++ b/pkg/cmd/nebula-importer_suite_test.go @@ -0,0 +1,13 @@ +package cmd + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestCmd(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg cmd Suite") +} diff --git a/pkg/cmd/nebula-importer_test.go b/pkg/cmd/nebula-importer_test.go new file mode 100644 index 00000000..6476d3dd --- /dev/null +++ b/pkg/cmd/nebula-importer_test.go @@ -0,0 +1,147 @@ +//go:build linux + +package cmd + +import ( + stderrors "errors" + "os" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/cmd/common" + "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" + + "github.com/agiledragon/gomonkey/v2" + "github.com/golang/mock/gomock" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ImporterCommand", func() { + var ( + patches *gomonkey.Patches + ctrl *gomock.Controller + mockClient *client.MockClient + mockClientPool *client.MockPool + mockResponse *client.MockResponse + mockManager *manager.MockManager + ) + BeforeEach(func() { + patches = gomonkey.NewPatches() + ctrl = gomock.NewController(GinkgoT()) + mockClient = client.NewMockClient(ctrl) + mockClientPool = client.NewMockPool(ctrl) + mockResponse = client.NewMockResponse(ctrl) + mockManager = manager.NewMockManager(ctrl) + }) + AfterEach(func() { + ctrl.Finish() + patches.Reset() + }) + + It("successfully", func() { + patches.ApplyFuncReturn(client.NewPool, mockClientPool) + + mockClientPool.EXPECT().GetClient(gomock.Any()).AnyTimes().Return(mockClient, nil) + mockClientPool.EXPECT().Open().AnyTimes().Return(nil) + mockClientPool.EXPECT().Execute(gomock.Any()).AnyTimes().Return(mockResponse, nil) + mockClientPool.EXPECT().Close().AnyTimes().Return(nil) + + mockClient.EXPECT().Open().AnyTimes().Return(nil) + mockClient.EXPECT().Execute(gomock.Any()).AnyTimes().Return(mockResponse, nil) + mockClient.EXPECT().Close().AnyTimes().Return(nil) + + mockResponse.EXPECT().IsSucceed().AnyTimes().Return(true) + mockResponse.EXPECT().GetLatency().AnyTimes().Return(time.Microsecond * 2) + mockResponse.EXPECT().GetRespTime().AnyTimes().Return(time.Microsecond * 2) + + for _, f := range []string{ + "testdata/nebula-importer.v3.yaml", + } { + command := NewDefaultImporterCommand() + command.SetArgs([]string{"-c", f}) + err := command.Execute() + Expect(err).NotTo(HaveOccurred()) + } + }) + + It("parse file failed", func() { + command := NewDefaultImporterCommand() + command.SetArgs([]string{"-c", "testdata/not-exists/nebula-importer.yaml"}) + err := command.Execute() + Expect(err).To(HaveOccurred()) + }) + + It("optimize failed", func() { + command := NewDefaultImporterCommand() + command.SetArgs([]string{"-c", "testdata/optimize-failed.yaml"}) + err := command.Execute() + Expect(err).To(HaveOccurred()) + }) + + It("build failed", func() { + command := NewDefaultImporterCommand() + command.SetArgs([]string{"-c", "testdata/build-failed.yaml"}) + err := command.Execute() + Expect(err).To(HaveOccurred()) + }) + + It("complete failed", func() { + o := NewImporterOptions(common.IOStreams{ + In: os.Stdin, + Out: os.Stdout, + ErrOut: os.Stderr, + }) + + patches.ApplyMethodReturn(o, "Complete", stderrors.New("test error")) + + o.useNopLogger = true + command := NewImporterCommand(o) + command.SetArgs([]string{"-c", "testdata/nebula-importer.v3.yaml"}) + + err := command.Execute() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(stderrors.New("test error"))) + }) + + It("manager start failed", func() { + patches.ApplyFuncReturn(manager.NewWithOpts, mockManager) + mockManager.EXPECT().Import(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes().Return(nil) + mockManager.EXPECT().Start().Return(stderrors.New("test error")) + + o := NewImporterOptions(common.IOStreams{ + In: os.Stdin, + Out: os.Stdout, + ErrOut: os.Stderr, + }) + + o.useNopLogger = true + command := NewImporterCommand(o) + command.SetArgs([]string{"-c", "testdata/nebula-importer.v3.yaml"}) + + err := command.Execute() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(stderrors.New("test error"))) + }) + + It("manager wait failed", func() { + patches.ApplyFuncReturn(manager.NewWithOpts, mockManager) + mockManager.EXPECT().Import(gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes().Return(nil) + 
mockManager.EXPECT().Start().Return(nil) + mockManager.EXPECT().Wait().Return(stderrors.New("test error")) + + o := NewImporterOptions(common.IOStreams{ + In: os.Stdin, + Out: os.Stdout, + ErrOut: os.Stderr, + }) + + o.useNopLogger = true + command := NewImporterCommand(o) + command.SetArgs([]string{"-c", "testdata/nebula-importer.v3.yaml"}) + + err := command.Execute() + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(stderrors.New("test error"))) + }) +}) diff --git a/pkg/cmd/runner.go b/pkg/cmd/runner.go deleted file mode 100644 index 7497b855..00000000 --- a/pkg/cmd/runner.go +++ /dev/null @@ -1,143 +0,0 @@ -package cmd - -import ( - "errors" - "fmt" - "sync" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/client" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/errhandler" - importerError "github.com/vesoft-inc/nebula-importer/v3/pkg/errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" - "github.com/vesoft-inc/nebula-importer/v3/pkg/reader" - "github.com/vesoft-inc/nebula-importer/v3/pkg/stats" -) - -type Runner struct { - errs []error - Readers []*reader.FileReader - stataMgr *stats.StatsMgr - NumFailed int64 -} - -func (r *Runner) Error() error { - if len(r.errs) == 0 { - return nil - } - - // TODO(yee): Only return first error - return r.errs[0] -} - -func (r *Runner) Errors() []error { - return r.errs -} - -func (r *Runner) Run(yaml *config.YAMLConfig) { - defer func() { - if re := recover(); re != nil { - r.errs = append(r.errs, importerError.Wrap(importerError.UnknownError, fmt.Errorf("%v", re))) - } - }() - - runnerLogger := logger.NewRunnerLogger(*yaml.LogPath) - - statsMgr := stats.NewStatsMgr(yaml.Files, runnerLogger) - defer statsMgr.Close() - - clientMgr, err := client.NewNebulaClientMgr(yaml.NebulaClientSettings, statsMgr.StatsCh, runnerLogger) - if err != nil { - r.errs = append(r.errs, importerError.Wrap(importerError.NebulaError, err)) - return - } - defer clientMgr.Close() - - errHandler := errhandler.New(statsMgr.StatsCh) - - freaders := make([]*reader.FileReader, len(yaml.Files)) - - var wgReaders sync.WaitGroup - for i, file := range yaml.Files { - errCh, err := errHandler.Init(file, clientMgr.GetNumConnections(), *yaml.RemoveTempFiles, runnerLogger) - if err != nil { - r.errs = append(r.errs, importerError.Wrap(importerError.ConfigError, err)) - statsMgr.StatsCh <- base.NewFileDoneStats(*file.Path) - continue - } - - if fr, err := reader.New(i, file, *yaml.RemoveTempFiles, clientMgr.GetRequestChans(), errCh, runnerLogger); err != nil { - r.errs = append(r.errs, importerError.Wrap(importerError.ConfigError, err)) - statsMgr.StatsCh <- base.NewFileDoneStats(*file.Path) - continue - } else { - runnerLogger.Infof("Start to read %s", *file.Path) - wgReaders.Add(1) - go func(fr *reader.FileReader, filename string) { - defer func() { - runnerLogger.Infof("Finish to read %s", filename) - wgReaders.Done() - }() - numReadFailed, err := fr.Read() - statsMgr.Stats.NumReadFailed += numReadFailed - if err != nil { - r.errs = append(r.errs, err) - statsMgr.StatsCh <- base.NewFileDoneStats(filename) - } - }(fr, *file.Path) - freaders[i] = fr - } - } - - r.Readers = freaders - r.stataMgr = statsMgr - - runnerLogger.Infof("Waiting for stats manager done") - <-statsMgr.DoneCh - runnerLogger.Infof("Waiting for all readers exit") - for _, r := range freaders { - if r != nil { - r.Stop() - } - } - // fix issues/219 - // The number of times `statsMgr.StatsCh <- 
base.NewFileDoneStats(filename)` has reached the number of readers, - // then <-statsMgr.DoneCh return, but not all readers have exited. - // So, it's need to wait for it exit. - wgReaders.Wait() - runnerLogger.Infof("All readers exited") - - r.stataMgr.CountFileBytes(r.Readers) - r.Readers = nil - r.NumFailed = statsMgr.Stats.NumFailed - - if statsMgr.Stats.NumFailed > 0 { - r.errs = append(r.errs, importerError.Wrap(importerError.NotCompleteError, - fmt.Errorf("Total %d lines fail to insert into nebula graph database", statsMgr.Stats.NumFailed))) - } -} - -func (r *Runner) QueryStats() (*stats.Stats, error) { - if r.stataMgr != nil { - if r.Readers != nil { - err := r.stataMgr.CountFileBytes(r.Readers) - if err != nil { - return nil, importerError.Wrap(importerError.NotCompleteError, err) - } - } - if r.stataMgr.Done == true { - return &r.stataMgr.Stats, nil - } - r.stataMgr.StatsCh <- base.NewOutputStats() - select { - case stats, ok := <-r.stataMgr.OutputStatsCh: - if !ok { - return nil, importerError.Wrap(importerError.UnknownError, errors.New("output stats to chanel fail")) - } - return &stats, nil - } - } else { - return nil, importerError.Wrap(importerError.NotCompleteError, errors.New("stataMgr not init complete")) - } -} diff --git a/pkg/cmd/testdata/build-failed.yaml b/pkg/cmd/testdata/build-failed.yaml new file mode 100644 index 00000000..fe4fd40a --- /dev/null +++ b/pkg/cmd/testdata/build-failed.yaml @@ -0,0 +1,51 @@ +client: + version: v3 + address: "127.0.0.1:0" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + graphName: "" + batch: 100 + readerConcurrency: 10 + importerConcurrency: 10 + statsInterval: 10s + hooks: + before: + - statements: + - statement1 + - statements: + - statement2 + wait: 1ms + +sources: + - path: ./node1.csv + nodes: + - name: node1 + id: + name: "id" + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + +log: + level: INFO + console: true + files: + - nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/pkg/cmd/testdata/edge1.csv b/pkg/cmd/testdata/edge1.csv new file mode 100644 index 00000000..68c7f70a --- /dev/null +++ b/pkg/cmd/testdata/edge1.csv @@ -0,0 +1 @@ +1,1,x diff --git a/pkg/cmd/testdata/nebula-importer.v3.yaml b/pkg/cmd/testdata/nebula-importer.v3.yaml new file mode 100644 index 00000000..43673fd6 --- /dev/null +++ b/pkg/cmd/testdata/nebula-importer.v3.yaml @@ -0,0 +1,143 @@ +client: + version: v3 + address: "127.0.0.1:0" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: graphName + batch: 100 + readerConcurrency: 10 + importerConcurrency: 10 + statsInterval: 10s + hooks: + before: + - statements: + - statement1 + - statements: + - statement2 + wait: 1ms + +sources: + - path: ./node1.csv + tags: + - name: node1 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + - name: node2 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + + - path: ./edge1.csv + edges: + - name: edge1 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: 
+ - name: "prop1" + type: "STRING" + index: 2 + - name: edge2 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "prop1" + type: "STRING" + index: 2 + + - path: ./node1.csv + tags: + - name: node1 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + - name: node2 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + edges: + - name: edge1 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + - name: edge2 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + +log: + level: INFO + console: true + files: + - nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/pkg/cmd/testdata/node1.csv b/pkg/cmd/testdata/node1.csv new file mode 100644 index 00000000..2f951b33 --- /dev/null +++ b/pkg/cmd/testdata/node1.csv @@ -0,0 +1 @@ +1,a,2,3.3 diff --git a/pkg/cmd/testdata/optimize-failed.yaml b/pkg/cmd/testdata/optimize-failed.yaml new file mode 100644 index 00000000..d2559bd3 --- /dev/null +++ b/pkg/cmd/testdata/optimize-failed.yaml @@ -0,0 +1,35 @@ +client: + version: v3 + address: "127.0.0.1:0" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + graphName: graphName + batch: 100 + readerConcurrency: 10 + importerConcurrency: 10 + statsInterval: 10s + hooks: + before: + - statements: + - statement1 + - statements: + - statement2 + wait: 1ms + +sources: + - path: ./node[a-b.csv + +log: + level: INFO + console: true + files: + - nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/pkg/cmd/util/cmd.go b/pkg/cmd/util/cmd.go new file mode 100644 index 00000000..0e8a8d47 --- /dev/null +++ b/pkg/cmd/util/cmd.go @@ -0,0 +1,13 @@ +package util + +import ( + "math/rand" + "time" + + "github.com/spf13/cobra" +) + +func Run(cmd *cobra.Command) error { + rand.Seed(time.Now().UnixNano()) + return cmd.Execute() +} diff --git a/pkg/cmd/util/cmd_test.go b/pkg/cmd/util/cmd_test.go new file mode 100644 index 00000000..3d74440c --- /dev/null +++ b/pkg/cmd/util/cmd_test.go @@ -0,0 +1,25 @@ +package util + +import ( + stderrors "errors" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "github.com/spf13/cobra" +) + +var _ = Describe("Run", func() { + It("success", func() { + err := Run(&cobra.Command{}) + Expect(err).NotTo(HaveOccurred()) + }) + + It("failed", func() { + err := Run(&cobra.Command{ + RunE: func(_ *cobra.Command, _ []string) error { + return stderrors.New("test error") + }, + }) + Expect(err).To(HaveOccurred()) + }) +}) diff --git a/pkg/cmd/util/error.go b/pkg/cmd/util/error.go new file mode 100644 index 00000000..0253fd6b --- /dev/null +++ b/pkg/cmd/util/error.go @@ -0,0 +1,31 @@ +package util + +import ( + "fmt" + "os" + "strings" +) + +var ( + fnFprint = fmt.Fprint + fnExit = os.Exit +) + +func CheckErr(err error) { + switch err.(type) { + case nil: + return + default: + fatal(fmt.Sprintf("%+v", err), 1) + } +} + +func fatal(msg string, code int) { + if len(msg) > 0 { + if !strings.HasSuffix(msg, "\n") { + msg += "\n" + } + _, _ = fnFprint(os.Stderr, msg) + } + fnExit(code) //revive:disable-line:deep-exit +} diff --git a/pkg/cmd/util/error_test.go b/pkg/cmd/util/error_test.go new file mode 100644 index 00000000..680e3b24 --- /dev/null +++ b/pkg/cmd/util/error_test.go @@ -0,0 +1,37 @@ +package util + +import ( + stderrors "errors" + "io" + + "github.com/agiledragon/gomonkey/v2" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("CheckErr", func() { + It("nil", func() { + CheckErr(nil) + }) + + It("no import error", func() { + patches := gomonkey.NewPatches() + defer patches.Reset() + + var ( + isFprintCalled bool + exitCode int + ) + patches.ApplyGlobalVar(&fnFprint, func(io.Writer, ...any) (int, error) { + isFprintCalled = true + return 0, nil + }) + patches.ApplyGlobalVar(&fnExit, func(code int) { + exitCode = code + }) + + CheckErr(stderrors.New("test error")) + Expect(isFprintCalled).To(BeTrue()) + Expect(exitCode).To(Equal(1)) + }) +}) diff --git a/pkg/cmd/util/util.go b/pkg/cmd/util/util.go new file mode 100644 index 00000000..c7d86821 --- /dev/null +++ b/pkg/cmd/util/util.go @@ -0,0 +1 @@ +package util diff --git a/pkg/cmd/util/util_suite_test.go b/pkg/cmd/util/util_suite_test.go new file mode 100644 index 00000000..1a753838 --- /dev/null +++ b/pkg/cmd/util/util_suite_test.go @@ -0,0 +1,13 @@ +package util + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func TestUtil(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg util Suite") +} diff --git a/pkg/config/base/client.go b/pkg/config/base/client.go new file mode 100644 index 00000000..dcec6e21 --- /dev/null +++ b/pkg/config/base/client.go @@ -0,0 +1,51 @@ +package configbase + +import ( + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" +) + +var newClientPool = client.NewPool + +const ( + ClientVersion3 = "v3" + ClientVersionDefault = ClientVersion3 +) + +type Client struct { + Version string `yaml:"version"` + Address string `yaml:"address"` + User string `yaml:"user,omitempty"` + Password string `yaml:"password,omitempty"` + ConcurrencyPerAddress int `yaml:"concurrencyPerAddress,omitempty"` + ReconnectInitialInterval time.Duration `yaml:"reconnectInitialInterval,omitempty"` + Retry int `yaml:"retry,omitempty"` + RetryInitialInterval time.Duration `yaml:"retryInitialInterval,omitempty"` +} + +func (c *Client) BuildClientPool(opts ...client.Option) (client.Pool, error) { + if c.Version == "" { + c.Version = ClientVersion3 + } + options := make([]client.Option, 0, 7+len(opts)) + options = append( + options, + client.WithAddress(c.Address), + client.WithUserPassword(c.User, c.Password), + client.WithReconnectInitialInterval(c.ReconnectInitialInterval), + client.WithRetry(c.Retry), + client.WithRetryInitialInterval(c.RetryInitialInterval), + client.WithConcurrencyPerAddress(c.ConcurrencyPerAddress), + ) + switch c.Version { + case ClientVersion3: + options = append(options, client.WithV3()) + default: + return nil, errors.ErrUnsupportedClientVersion + } + options = append(options, opts...) + pool := newClientPool(options...) + return pool, nil +} diff --git a/pkg/config/base/client_test.go b/pkg/config/base/client_test.go new file mode 100644 index 00000000..eb5d33fc --- /dev/null +++ b/pkg/config/base/client_test.go @@ -0,0 +1,92 @@ +package configbase + +import ( + stderrors "errors" + "os" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Client", func() { + Describe(".BuildClientPool", func() { + var ( + tmpdir string + ) + BeforeEach(func() { + var err error + tmpdir, err = os.MkdirTemp("", "test") + Expect(err).NotTo(HaveOccurred()) + }) + AfterEach(func() { + err := os.RemoveAll(tmpdir) + Expect(err).NotTo(HaveOccurred()) + }) + + DescribeTable("version", + func(v string) { + c := &Client{ + Version: v, + Address: "127.0.0.1:0", + User: "root", + Password: "nebula", + ConcurrencyPerAddress: 10, + ReconnectInitialInterval: time.Second, + Retry: 3, + RetryInitialInterval: time.Second, + } + + pool, err := c.BuildClientPool() + var isSupportVersion = true + switch v { + case "": + v = ClientVersionDefault + case ClientVersion3: + default: + isSupportVersion = false + } + if isSupportVersion { + Expect(c.Version).To(Equal(v)) + Expect(err).NotTo(HaveOccurred()) + Expect(pool).NotTo(BeNil()) + } else { + Expect(stderrors.Is(err, errors.ErrUnsupportedClientVersion)).To(BeTrue()) + Expect(pool).To(BeNil()) + } + }, + EntryDescription("%[1]s"), + Entry(nil, ""), + Entry(nil, "v3"), + Entry(nil, "v"), + ) + }) + + DescribeTable("OptimizeFiles", + func(configPath string, files, expectFiles []string) { + l := &Log{ + Files: files, + } + Expect(l.OptimizeFiles(configPath)).NotTo(HaveOccurred()) + Expect(l.Files).To(Equal(expectFiles)) + }, + EntryDescription("%[1]s : %[2]v => %[3]v"), + + Entry(nil, "f.yaml", nil, nil), + Entry(nil, "./f.yaml", []string{"1.log"}, []string{"1.log"}), + Entry(nil, "f.yaml", []string{"1.log", "2.log"}, []string{"1.log", "2.log"}), + Entry(nil, "./f.yaml", []string{"d10/1.log", "./d20/2.log"}, []string{"d10/1.log", "d20/2.log"}), + + Entry(nil, "./d1/f.yaml", nil, nil), + Entry(nil, "d1/f.yaml", []string{"1.log"}, []string{"d1/1.log"}), + Entry(nil, "./d1/f.yaml", []string{"1.log", "2.log"}, []string{"d1/1.log", "d1/2.log"}), + Entry(nil, "d1/f.yaml", []string{"d10/1.log", "./d20/2.log"}, []string{"d1/d10/1.log", "d1/d20/2.log"}), + + Entry(nil, "./d1/f.yaml", nil, nil), + Entry(nil, "d1/f.yaml", []string{"/1.log"}, []string{"/1.log"}), + Entry(nil, "./d1/f.yaml", []string{"/1.log", "/2.log"}, []string{"/1.log", "/2.log"}), + Entry(nil, "d1/f.yaml", []string{"/d10/1.log", "/d20/2.log"}, []string{"/d10/1.log", "/d20/2.log"}), + ) +}) diff --git a/pkg/config/base/config.go b/pkg/config/base/config.go new file mode 100644 index 00000000..a139d011 --- /dev/null +++ b/pkg/config/base/config.go @@ -0,0 +1,15 @@ +package configbase + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" +) + +type Configurator interface { + Optimize(configPath string) error + Build() error + GetLogger() logger.Logger + GetClientPool() client.Pool + GetManager() manager.Manager +} diff --git a/pkg/config/base/config_suite_test.go b/pkg/config/base/config_suite_test.go new file mode 100644 index 00000000..039ab7cc --- /dev/null +++ b/pkg/config/base/config_suite_test.go @@ -0,0 +1,13 @@ +package configbase + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func TestConfig(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg config base Suite") +} diff --git a/pkg/config/base/log.go b/pkg/config/base/log.go new file mode 100644 index 00000000..04f0c12f --- /dev/null +++ b/pkg/config/base/log.go @@ -0,0 +1,49 @@ +package configbase + +import ( + "path/filepath" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" +) + +type Log struct { + Level *string `yaml:"level,omitempty"` + Console *bool `yaml:"console,omitempty"` + Files []string `yaml:"files,omitempty"` + Fields logger.Fields `yaml:"fields,omitempty"` +} + +// OptimizeFiles optimizes relative files path base to the configuration file path +func (l *Log) OptimizeFiles(configPath string) error { + if l == nil { + return nil + } + + configPathDir := filepath.Dir(configPath) + for i := range l.Files { + l.Files[i] = utils.RelativePathBaseOn(configPathDir, l.Files[i]) + } + + return nil +} + +func (l *Log) BuildLogger(opts ...logger.Option) (logger.Logger, error) { + options := make([]logger.Option, 0, 4+len(opts)) + if l != nil { + if l.Level != nil && *l.Level != "" { + options = append(options, logger.WithLevelText(*l.Level)) + } + if l.Console != nil { + options = append(options, logger.WithConsole(*l.Console)) + } + if len(l.Files) > 0 { + options = append(options, logger.WithFiles(l.Files...)) + } + if len(l.Fields) > 0 { + options = append(options, logger.WithFields(l.Fields...)) + } + } + options = append(options, opts...) + return logger.New(options...) +} diff --git a/pkg/config/base/log_test.go b/pkg/config/base/log_test.go new file mode 100644 index 00000000..c7cdb399 --- /dev/null +++ b/pkg/config/base/log_test.go @@ -0,0 +1,93 @@ +package configbase + +import ( + "os" + "path/filepath" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Log", func() { + Describe(".BuildLogger", func() { + var ( + tmpdir string + ) + + BeforeEach(func() { + var err error + tmpdir, err = os.MkdirTemp("", "test") + Expect(err).NotTo(HaveOccurred()) + }) + AfterEach(func() { + err := os.RemoveAll(tmpdir) + Expect(err).NotTo(HaveOccurred()) + }) + It("failed", func() { + var ( + level = "INFO" + console = true + ) + configLog := Log{ + Level: &level, + Console: &console, + Files: []string{filepath.Join(tmpdir, "not-exists", "1.log")}, + Fields: nil, + } + l, err := configLog.BuildLogger() + Expect(err).To(HaveOccurred()) + Expect(l).To(BeNil()) + }) + + It("success", func() { + var ( + level = "INFO" + console = true + ) + configLog := Log{ + Level: &level, + Console: &console, + Files: []string{filepath.Join(tmpdir, "1.log")}, + Fields: logger.Fields{{Key: "k1", Value: "v1"}}, + } + + l, err := configLog.BuildLogger() + Expect(err).NotTo(HaveOccurred()) + defer l.Close() + Expect(l).NotTo(BeNil()) + }) + }) + + It(".OptimizeFiles nil", func() { + var configLog *Log + Expect(configLog.OptimizeFiles("")).NotTo(HaveOccurred()) + }) + + DescribeTable(".OptimizeFiles", + func(configPath string, files, expectFiles []string) { + l := &Log{ + Files: files, + } + Expect(l.OptimizeFiles(configPath)).NotTo(HaveOccurred()) + Expect(l.Files).To(Equal(expectFiles)) + }, + EntryDescription("%[1]s : %[2]v => %[3]v"), + + Entry(nil, "f.yaml", nil, nil), + Entry(nil, "./f.yaml", []string{"1.log"}, []string{"1.log"}), + Entry(nil, "f.yaml", []string{"1.log", "2.log"}, []string{"1.log", "2.log"}), + Entry(nil, "./f.yaml", []string{"d10/1.log", "./d20/2.log"}, []string{"d10/1.log", "d20/2.log"}), + + Entry(nil, "./d1/f.yaml", nil, nil), + Entry(nil, "d1/f.yaml", []string{"1.log"}, []string{"d1/1.log"}), + Entry(nil, "./d1/f.yaml", []string{"1.log", "2.log"}, []string{"d1/1.log", "d1/2.log"}), + Entry(nil, "d1/f.yaml", []string{"d10/1.log", "./d20/2.log"}, []string{"d1/d10/1.log", "d1/d20/2.log"}), + + Entry(nil, "./d1/f.yaml", nil, nil), + Entry(nil, "d1/f.yaml", []string{"/1.log"}, []string{"/1.log"}), + Entry(nil, "./d1/f.yaml", []string{"/1.log", "/2.log"}, []string{"/1.log", "/2.log"}), + Entry(nil, "d1/f.yaml", []string{"/d10/1.log", "/d20/2.log"}, []string{"/d10/1.log", "/d20/2.log"}), + ) +}) diff --git a/pkg/config/base/manager.go b/pkg/config/base/manager.go new file mode 100644 index 00000000..8038cc9b --- /dev/null +++ b/pkg/config/base/manager.go @@ -0,0 +1,17 @@ +package configbase + +import ( + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" +) + +type ( + Manager struct { + Batch int `yaml:"batch,omitempty"` + ReaderConcurrency int `yaml:"readerConcurrency,omitempty"` + ImporterConcurrency int `yaml:"importerConcurrency,omitempty"` + StatsInterval time.Duration `yaml:"statsInterval,omitempty"` + Hooks manager.Hooks `yaml:"hooks,omitempty"` + } +) diff --git a/pkg/config/base/source.go b/pkg/config/base/source.go new file mode 100644 index 00000000..fca59171 --- /dev/null +++ b/pkg/config/base/source.go @@ -0,0 +1,35 @@ +package configbase + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/reader" + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" +) + +var sourceNew = source.New + +type ( + Source struct { + SourceConfig source.Config `yaml:",inline"` + Batch int `yaml:"batch,omitempty"` + } +) + +func (s *Source) BuildSourceAndReader(opts ...reader.Option) ( + source.Source, + reader.BatchRecordReader, + error, +) { + sourceConfig := s.SourceConfig + src, err := 
sourceNew(&sourceConfig) + if err != nil { + return nil, nil, err + } + if s.Batch > 0 { + // Override the batch in the manager. + opts = append(opts, reader.WithBatch(s.Batch)) + } + + rr := reader.NewRecordReader(src) + brr := reader.NewBatchRecordReader(rr, s.Batch, opts...) + return src, brr, nil +} diff --git a/pkg/config/base/source_test.go b/pkg/config/base/source_test.go new file mode 100644 index 00000000..f5df8634 --- /dev/null +++ b/pkg/config/base/source_test.go @@ -0,0 +1,75 @@ +package configbase + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + + "github.com/agiledragon/gomonkey/v2" + "github.com/golang/mock/gomock" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Source", func() { + Describe(".BuildSourceAndReader", func() { + var ( + s *Source + ctrl *gomock.Controller + mockSource *source.MockSource + patches *gomonkey.Patches + ) + BeforeEach(func() { + ctrl = gomock.NewController(GinkgoT()) + mockSource = source.NewMockSource(ctrl) + patches = gomonkey.NewPatches() + s = &Source{ + SourceConfig: source.Config{ + Local: &source.LocalConfig{ + Path: "path", + }, + CSV: &source.CSVConfig{ + Delimiter: ",", + }, + }, + Batch: 7, + } + }) + AfterEach(func() { + ctrl.Finish() + patches.Reset() + }) + It("successfully", func() { + patches.ApplyGlobalVar(&sourceNew, func(_ *source.Config) (source.Source, error) { + return mockSource, nil + }) + + mockSource.EXPECT().Name().AnyTimes().Return("source name") + mockSource.EXPECT().Config().AnyTimes().Return(&s.SourceConfig) + mockSource.EXPECT().Read(gomock.Any()).AnyTimes().DoAndReturn(func(p []byte) (int, error) { + n := copy(p, "a,b,c\n") + return n, nil + }) + + src, brr, err := s.BuildSourceAndReader() + Expect(err).NotTo(HaveOccurred()) + Expect(src).NotTo(BeNil()) + Expect(brr).NotTo(BeNil()) + + n, records, err := brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(records).To(HaveLen(7)) + Expect(n).To(Equal(6 * 7)) + }) + + It("failed", func() { + patches.ApplyGlobalVar(&sourceNew, func(_ *source.Config) (source.Source, error) { + return nil, stderrors.New("test error") + }) + src, brr, err := s.BuildSourceAndReader() + Expect(err).To(HaveOccurred()) + Expect(src).To(BeNil()) + Expect(brr).To(BeNil()) + }) + }) +}) diff --git a/pkg/config/config.go b/pkg/config/config.go index 3e5e1fac..86ef980d 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -1,992 +1,59 @@ package config import ( - "errors" - "fmt" - "io/ioutil" - "net/url" + "io" "os" - "path/filepath" - "strings" - "time" - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - ierrors "github.com/vesoft-inc/nebula-importer/v3/pkg/errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" - "github.com/vesoft-inc/nebula-importer/v3/pkg/picker" - "github.com/vesoft-inc/nebula-importer/v3/pkg/utils" - "gopkg.in/yaml.v2" -) + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + configv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/config/v3" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" -const ( - dbNULL = "NULL" + "gopkg.in/yaml.v3" ) -type NebulaClientConnection struct { - User *string `json:"user" yaml:"user"` - Password *string `json:"password" yaml:"password"` - Address *string `json:"address" yaml:"address"` -} - -type NebulaPostStart struct { - Commands *string `json:"commands" yaml:"commands"` - AfterPeriod *string `json:"afterPeriod" yaml:"afterPeriod"` -} - -type NebulaPreStop struct { - Commands *string `json:"commands" 
yaml:"commands"` -} - -type NebulaClientSettings struct { - Retry *int `json:"retry" yaml:"retry"` - Concurrency *int `json:"concurrency" yaml:"concurrency"` - ChannelBufferSize *int `json:"channelBufferSize" yaml:"channelBufferSize"` - Space *string `json:"space" yaml:"space"` - Connection *NebulaClientConnection `json:"connection" yaml:"connection"` - PostStart *NebulaPostStart `json:"postStart" yaml:"postStart"` // from v1 - PreStop *NebulaPreStop `json:"preStop" yaml:"preStop"` // from v1 -} - -type Prop struct { - Name *string `json:"name" yaml:"name"` - Type *string `json:"type" yaml:"type"` - Index *int `json:"index" yaml:"index"` - Nullable bool `json:"nullable" yaml:"nullable"` - NullValue string `json:"nullValue" yaml:"nullValue"` - AlternativeIndices []int `json:"alternativeIndices" yaml:"alternativeIndices"` - DefaultValue *string `json:"defaultValue" yaml:"defaultValue"` - picker picker.Picker -} - -type VID struct { - Index *int `json:"index" yaml:"index"` - ConcatItems []interface{} `json:"concatItems" yaml:"concatItems"` // only string and int is support, int is for Index - Function *string `json:"function" yaml:"function"` - Type *string `json:"type" yaml:"type"` - Prefix *string `json:"prefix" yaml:"prefix"` - picker picker.Picker -} - -type Rank struct { - Index *int `json:"index" yaml:"index"` -} - -type Edge struct { - Name *string `json:"name" yaml:"name"` - WithRanking *bool `json:"withRanking" yaml:"withRanking"` - Props []*Prop `json:"props" yaml:"props"` - SrcVID *VID `json:"srcVID" yaml:"srcVID"` - DstVID *VID `json:"dstVID" yaml:"dstVID"` - Rank *Rank `json:"rank" yaml:"rank"` -} - -type Tag struct { - Name *string `json:"name" yaml:"name"` - Props []*Prop `json:"props" yaml:"props"` -} - -type Vertex struct { - VID *VID `json:"vid" yaml:"vid"` - Tags []*Tag `json:"tags" yaml:"tags"` -} - -type Schema struct { - Type *string `json:"type" yaml:"type"` - Edge *Edge `json:"edge" yaml:"edge"` - Vertex *Vertex `json:"vertex" yaml:"vertex"` -} - -type CSVConfig struct { - WithHeader *bool `json:"withHeader" yaml:"withHeader"` - WithLabel *bool `json:"withLabel" yaml:"withLabel"` - Delimiter *string `json:"delimiter" yaml:"delimiter"` - LazyQuotes *bool `json:"lazyQuotes" yaml:"lazyQuotes"` -} - -type File struct { - Path *string `json:"path" yaml:"path"` - FailDataPath *string `json:"failDataPath" yaml:"failDataPath"` - BatchSize *int `json:"batchSize" yaml:"batchSize"` - Limit *int `json:"limit" yaml:"limit"` - InOrder *bool `json:"inOrder" yaml:"inOrder"` - Type *string `json:"type" yaml:"type"` - CSV *CSVConfig `json:"csv" yaml:"csv"` - Schema *Schema `json:"schema" yaml:"schema"` -} - -type YAMLConfig struct { - Version *string `json:"version" yaml:"version"` - Description *string `json:"description" yaml:"description"` - RemoveTempFiles *bool `json:"removeTempFiles" yaml:"removeTempFiles"` // from v1 - NebulaClientSettings *NebulaClientSettings `json:"clientSettings" yaml:"clientSettings"` - LogPath *string `json:"logPath" yaml:"logPath"` - WorkingDirectory *string `json:"workingDir" yaml:"workingDir"` - Files []*File `json:"files" yaml:"files"` -} - -var ( - kDefaultVidType = "string" - kDefaultConnAddr = "127.0.0.1:9669" - kDefaultUser = "root" - kDefaultPassword = "nebula" - kDefaultBatchSize = 128 - supportedVersions = []string{"v1rc1", "v1rc2", "v1", "v2"} +type ( + Client = configbase.Client + Log = configbase.Log + Configurator = configbase.Configurator ) -func isSupportedVersion(ver string) bool { - for _, v := range supportedVersions { - if v == ver { - 
return true - } - } - return false -} - -func Parse(filename string, runnerLogger logger.Logger) (*YAMLConfig, error) { - logger.SetLogger(runnerLogger) - content, err := ioutil.ReadFile(filename) - if err != nil { - return nil, ierrors.Wrap(ierrors.InvalidConfigPathOrFormat, err) - } - - var conf YAMLConfig - if err = yaml.Unmarshal(content, &conf); err != nil { - return nil, ierrors.Wrap(ierrors.InvalidConfigPathOrFormat, err) - } - - if conf.Version == nil || !isSupportedVersion(*conf.Version) { - return nil, ierrors.Wrap(ierrors.InvalidConfigPathOrFormat, - fmt.Errorf("The supported YAML configure versions are %v, please upgrade importer.", supportedVersions)) - } - abs, err := filepath.Abs(filename) - if err != nil { - return nil, ierrors.Wrap(ierrors.InvalidConfigPathOrFormat, err) - } - path := filepath.Dir(abs) - - if workingDir := conf.WorkingDirectory; workingDir != nil && len(*workingDir) > 0 { - if !filepath.IsAbs(*workingDir) { - path = filepath.Join(path, *workingDir) - } else { - path = *workingDir - } - } - - if err = conf.ValidateAndReset(path); err != nil { - return nil, ierrors.Wrap(ierrors.ConfigError, err) - } - - return &conf, nil -} - -func (config *YAMLConfig) ValidateAndReset(dir string) error { - if config.NebulaClientSettings == nil { - return errors.New("please configure clientSettings") - } - if err := config.NebulaClientSettings.validateAndReset("clientSettings"); err != nil { - return err - } - - if config.RemoveTempFiles == nil { - removeTempFiles := false - config.RemoveTempFiles = &removeTempFiles - logger.Log.Warnf("You have not configured whether to remove generated temporary files, reset to default value. removeTempFiles: %v", - *config.RemoveTempFiles) - } - - if config.LogPath == nil { - defaultPath := filepath.Join(os.TempDir(), fmt.Sprintf("nebula-importer-%d.log", time.Now().UnixNano())) - config.LogPath = &defaultPath - logger.Log.Warnf("You have not configured the log file path in: logPath, reset to default path: %s", *config.LogPath) - } - if !filepath.IsAbs(*config.LogPath) { - absPath := filepath.Join(dir, *config.LogPath) - config.LogPath = &absPath - } - - if config.Files == nil || len(config.Files) == 0 { - return errors.New("There is no files in configuration") - } - - //TODO(yuyu): check each item in config.Files - // if item is a directory, iter this directory and replace this directory config section by filename config section - if err := config.expandDirectoryToFiles(dir); err != nil { - logger.Log.Errorf("%s", err) - return err - } - for i := range config.Files { - if err := config.Files[i].validateAndReset(dir, fmt.Sprintf("files[%d]", i)); err != nil { - return err - } - } - - return nil -} - -func (config *YAMLConfig) expandDirectoryToFiles(dir string) (err error) { - var newFiles []*File - - for _, file := range config.Files { - files, err := file.expandFiles(dir) - if err != nil { - logger.Log.Errorf("error when expand file: %s", err) - return err - } - for _, f := range files { - newFiles = append(newFiles, f) - } - } - config.Files = newFiles - - return err -} - -func (n *NebulaPostStart) validateAndReset(prefix string) error { - if n.AfterPeriod != nil { - _, err := time.ParseDuration(*n.AfterPeriod) - if err != nil { - return err - } - } else { - period := "0s" - n.AfterPeriod = &period - } - - return nil -} - -func (n *NebulaClientSettings) validateAndReset(prefix string) error { - if n.Space == nil { - return fmt.Errorf("Please configure the space name in: %s.space", prefix) - } - - if n.Retry == nil { - retry := 1 - 
n.Retry = &retry - logger.Log.Warnf("Invalid retry option in %s.retry, reset to %d ", prefix, *n.Retry) - } - - if n.Concurrency == nil { - d := 10 - n.Concurrency = &d - logger.Log.Warnf("Invalid client concurrency in %s.concurrency, reset to %d", prefix, *n.Concurrency) - } - - if n.ChannelBufferSize == nil { - d := 128 - n.ChannelBufferSize = &d - logger.Log.Warnf("Invalid client channel buffer size in %s.channelBufferSize, reset to %d", prefix, *n.ChannelBufferSize) - } - - if n.Connection == nil { - return fmt.Errorf("Please configure the connection information in: %s.connection", prefix) - } - if err := n.Connection.validateAndReset(fmt.Sprintf("%s.connection", prefix)); err != nil { - return err - } - - if n.PostStart != nil { - return n.PostStart.validateAndReset(fmt.Sprintf("%s.postStart", prefix)) - } - return nil -} - -func (c *NebulaClientConnection) validateAndReset(prefix string) error { - if c.Address == nil { - c.Address = &kDefaultConnAddr - logger.Log.Warnf("%s.address: %s", prefix, *c.Address) - } - - if c.User == nil { - c.User = &kDefaultUser - logger.Log.Warnf("%s.user: %s", prefix, *c.User) - } - - if c.Password == nil { - c.Password = &kDefaultPassword - logger.Log.Warnf("%s.password: %s", prefix, *c.Password) - } - return nil -} - -func (f *File) IsInOrder() bool { - return (f.InOrder != nil && *f.InOrder) || (f.CSV != nil && f.CSV.WithLabel != nil && *f.CSV.WithLabel) -} - -func (f *File) validateAndReset(dir, prefix string) error { - if f.Path == nil { - return fmt.Errorf("Please configure file path in: %s.path", prefix) - } - - if base.HasHttpPrefix(*f.Path) { - if _, err := url.ParseRequestURI(*f.Path); err != nil { - return err - } - - if _, _, err := base.ExtractFilename(*f.Path); err != nil { - return err - } - - if f.FailDataPath == nil { - failDataPath := filepath.Join(os.TempDir(), fmt.Sprintf("nebula-importer-err-data-%d", time.Now().UnixNano())) - f.FailDataPath = &failDataPath - logger.Log.Warnf("You have not configured the failed data output file path in: %s.failDataPath, reset to tmp path: %s", - prefix, *f.FailDataPath) - } else { - if !filepath.IsAbs(*f.FailDataPath) { - absPath := filepath.Join(dir, *f.FailDataPath) - f.FailDataPath = &absPath - } - } - } else { - if !filepath.IsAbs(*f.Path) { - absPath := filepath.Join(dir, *f.Path) - f.Path = &absPath - } - if !base.FileExists(*f.Path) { - return fmt.Errorf("File(%s) doesn't exist", *f.Path) - } - - if f.FailDataPath == nil { - p := filepath.Join(filepath.Dir(*f.Path), "err", filepath.Base(*f.Path)) - f.FailDataPath = &p - logger.Log.Warnf("You have not configured the failed data output file path in: %s.failDataPath, reset to default path: %s", - prefix, *f.FailDataPath) - } else { - if !filepath.IsAbs(*f.FailDataPath) { - absPath := filepath.Join(dir, *f.FailDataPath) - f.FailDataPath = &absPath - } - } - } - - if f.BatchSize == nil { - f.BatchSize = &kDefaultBatchSize - logger.Log.Infof("Invalid batch size in file(%s), reset to %d", *f.Path, *f.BatchSize) - } - - if f.InOrder == nil { - inOrder := false - f.InOrder = &inOrder - } - - if strings.ToLower(*f.Type) != "csv" { - // TODO: Now only support csv import - return fmt.Errorf("Invalid file data type: %s, reset to csv", *f.Type) - } - - if f.CSV != nil { - err := f.CSV.validateAndReset(fmt.Sprintf("%s.csv", prefix)) - if err != nil { - return err - } - } - - if f.Schema == nil { - return fmt.Errorf("Please configure file schema: %s.schema", prefix) - } - return f.Schema.validateAndReset(fmt.Sprintf("%s.schema", prefix)) -} - -func (f *File) 
expandFiles(dir string) ([]*File, error) { - var files []*File - if base.HasHttpPrefix(*f.Path) { - if f.FailDataPath != nil { - _, filename, err := base.ExtractFilename(*f.Path) - if err != nil { - return nil, err - } - failedDataPath := filepath.Join(*f.FailDataPath, filename) - f.FailDataPath = &failedDataPath - logger.Log.Infof("Failed data path: %v", failedDataPath) - } - files = append(files, f) - } else { - if !filepath.IsAbs(*f.Path) { - absPath := filepath.Join(dir, *f.Path) - f.Path = &absPath - } - - fileNames, err := filepath.Glob(*f.Path) - if err != nil || len(fileNames) == 0 { - logger.Log.Errorf("error file path: %s", *f.Path) - return files, err - } - - for i := range fileNames { - var failedDataPath *string = nil - if f.FailDataPath != nil { - base := filepath.Base(fileNames[i]) - tmp := filepath.Join(*f.FailDataPath, base) - failedDataPath = &tmp - logger.Log.Infof("Failed data path: %v", *failedDataPath) - } - eachConf := *f - eachConf.Path = &fileNames[i] - eachConf.FailDataPath = failedDataPath - files = append(files, &eachConf) - logger.Log.Infof("find file: %v", *eachConf.Path) - } - } - - return files, nil -} - -func (c *CSVConfig) validateAndReset(prefix string) error { - if c.WithHeader == nil { - h := false - c.WithHeader = &h - logger.Log.Infof("%s.withHeader: %v", prefix, false) - } - - if c.WithLabel == nil { - l := false - c.WithLabel = &l - logger.Log.Infof("%s.withLabel: %v", prefix, false) - } - - if c.Delimiter != nil { - if len(*c.Delimiter) == 0 { - return fmt.Errorf("%s.delimiter is empty string", prefix) - } - } - - return nil -} - -func (s *Schema) IsVertex() bool { - return strings.ToUpper(*s.Type) == "VERTEX" -} - -func (s *Schema) String() string { - if s.IsVertex() { - return s.Vertex.String() - } else { - return s.Edge.String() - } -} - -func (s *Schema) CollectEmptyPropsTagNames() []string { - if !s.IsVertex() || s.Vertex == nil { - return nil +func FromBytes(content []byte) (Configurator, error) { + type tmpConfig struct { + Client struct { + Version string `yaml:"version"` + } `yaml:"client"` } - var tagNames []string - for _, tag := range s.Vertex.Tags { - if len(tag.Props) == 0 { - tagNames = append(tagNames, *tag.Name) - continue - } - for _, prop := range tag.Props { - if prop != nil { - continue - } - } - tagNames = append(tagNames, *tag.Name) + var tc tmpConfig + if err := yaml.Unmarshal(content, &tc); err != nil { + return nil, err } - return tagNames -} - -func (s *Schema) validateAndReset(prefix string) error { - var err error = nil - switch strings.ToLower(*s.Type) { - case "edge": - if s.Edge == nil { - logger.Log.Infof("%s.edge is nil", prefix) - s.Edge = &Edge{} - } - err = s.Edge.validateAndReset(fmt.Sprintf("%s.edge", prefix)) - case "vertex": - if s.Vertex == nil { - logger.Log.Infof("%s.vertex is nil", prefix) - s.Vertex = &Vertex{} - } - err = s.Vertex.validateAndReset(fmt.Sprintf("%s.vertex", prefix)) + var c Configurator + switch tc.Client.Version { + case configbase.ClientVersion3: + c = &configv3.Config{} default: - err = fmt.Errorf("Error schema type(%s) in %s.type only edge and vertex are supported", *s.Type, prefix) + return nil, errors.ErrUnsupportedClientVersion } - return err -} -func (v *VID) ParseFunction(str string) (err error) { - i := strings.Index(str, "(") - j := strings.Index(str, ")") - if i < 0 && j < 0 { - v.Function = nil - v.Type = &kDefaultVidType - } else if i > 0 && j > i { - strs := strings.ToLower(str[i+1 : j]) - fnType := strings.Split(strs, "+") - if len(fnType) == 2 { - v.Function = &fnType[0] 
- v.Type = &fnType[1] - v.Prefix = nil - } else if len(fnType) == 3 { - v.Function = &fnType[0] - v.Type = &fnType[1] - v.Prefix = &fnType[2] - } else { - v.Function = nil - v.Type = &fnType[0] - v.Prefix = nil - } - } else { - err = fmt.Errorf("Invalid function format: %s", str) + if err := yaml.Unmarshal(content, c); err != nil { + return nil, err } - return + return c, nil } -func (v *VID) String(vid string) string { - if (v.Function != nil && *v.Function != "") && (v.Prefix != nil && *v.Prefix != "") { - return fmt.Sprintf("%s(%s+%s+%s)", vid, *v.Function, *v.Type, *v.Prefix) - } else if (v.Function == nil || *v.Function == "") && (v.Prefix != nil && *v.Prefix != "") { - return fmt.Sprintf("%s(%s+%s+%s)", vid, "", *v.Type, *v.Prefix) - } else if (v.Function != nil && *v.Function != "") && (v.Prefix == nil || *v.Prefix == "") { - return fmt.Sprintf("%s(%s+%s)", vid, *v.Function, *v.Type) - } else { - return fmt.Sprintf("%s(%s)", vid, *v.Type) - } -} - -func (v *VID) FormatValue(record base.Record) (string, error) { - value, err := v.picker.Pick(record) +func FromReader(r io.Reader) (Configurator, error) { + content, err := io.ReadAll(r) if err != nil { - return "", err - } - return value.Val, nil -} - -func (v *VID) checkFunction(prefix string) error { - if v.Function != nil { - switch strings.ToLower(*v.Function) { - case "", "hash": - default: - return fmt.Errorf("Invalid %s.function: %s, only following values are supported: \"\", hash", prefix, *v.Function) - } + return nil, err } - return nil + return FromBytes(content) } -func (v *VID) validateAndReset(prefix string, defaultVal int) error { - if v.Index == nil { - v.Index = &defaultVal - } - if *v.Index < 0 { - return fmt.Errorf("Invalid %s.index: %d", prefix, *v.Index) - } - if err := v.checkFunction(prefix); err != nil { - return err - } - if v.Type != nil { - vidType := strings.TrimSpace(strings.ToLower(*v.Type)) - if vidType != "string" && vidType != "int" { - return fmt.Errorf("vid type must be `string' or `int', now is %s", vidType) - } - } else { - v.Type = &kDefaultVidType - logger.Log.Warnf("Not set %s.Type, reset to default value `%s'", prefix, *v.Type) - } - - return v.InitPicker() -} - -func (v *VID) InitPicker() error { - pickerConfig := picker.Config{ - Type: *v.Type, - Function: v.Function, - } - - hasPrefix := v.Prefix != nil && *v.Prefix != "" - - if len(v.ConcatItems) > 0 { - if hasPrefix { - pickerConfig.ConcatItems.AddConstant(*v.Prefix) - } - for i, item := range v.ConcatItems { - switch val := item.(type) { - case int: - pickerConfig.ConcatItems.AddIndex(val) - case string: - pickerConfig.ConcatItems.AddConstant(val) - default: - return fmt.Errorf("ConcatItems only support int or string, but the %d is %v", i, val) - } - } - } else if hasPrefix { - pickerConfig.ConcatItems.AddConstant(*v.Prefix) - pickerConfig.ConcatItems.AddIndex(*v.Index) - } else { - pickerConfig.Indices = []int{*v.Index} - } - - if (v.Function == nil || *v.Function == "") && strings.EqualFold(*v.Type, "int") { - pickerConfig.CheckOnPost = func(v *picker.Value) error { - return checkVidFormat(v.Val, true) - } - } - - var err error - v.picker, err = pickerConfig.Build() - return err -} - -func (r *Rank) validateAndReset(prefix string, defaultVal int) error { - if r.Index == nil { - r.Index = &defaultVal - } - if *r.Index < 0 { - return fmt.Errorf("Invalid %s.index: %d", prefix, *r.Index) - } - return nil -} - -func checkVidFormat(vid string, isInt bool) error { - if isInt { - if utils.IsInteger(vid) { - return nil - } - vidLen := len(vid) - 
if vidLen > 8 /* hash("") */ && strings.HasSuffix(vid, "\")") && strings.HasPrefix(vid, "hash(\"") { - return nil - } - return fmt.Errorf("Invalid vid format: " + vid) - } - return nil -} - -func (e *Edge) FormatValues(record base.Record) (string, error) { - var cells []string - for i, prop := range e.Props { - if c, err := prop.FormatValue(record); err != nil { - return "", fmt.Errorf("edge: %s, column: %d, error: %v", e.String(), i, err) - } else { - cells = append(cells, c) - } - } - rank := "" - if e.Rank != nil && e.Rank.Index != nil { - rank = fmt.Sprintf("@%s", record[*e.Rank.Index]) - } - srcVID, err := e.SrcVID.FormatValue(record) - if err != nil { - return "", err - } - dstVID, err := e.DstVID.FormatValue(record) +func FromFile(name string) (Configurator, error) { + f, err := os.Open(name) if err != nil { - return "", err - } - return fmt.Sprintf(" %s->%s%s:(%s) ", srcVID, dstVID, rank, strings.Join(cells, ",")), nil -} - -func (e *Edge) maxIndex() int { - maxIdx := 0 - if e.SrcVID != nil && e.SrcVID.Index != nil && *e.SrcVID.Index > maxIdx { - maxIdx = *e.SrcVID.Index - } - - if e.DstVID != nil && e.DstVID.Index != nil && *e.DstVID.Index > maxIdx { - maxIdx = *e.DstVID.Index - } - - if e.Rank != nil && e.Rank.Index != nil && *e.Rank.Index > maxIdx { - maxIdx = *e.Rank.Index - } - - for _, p := range e.Props { - if p != nil && p.Index != nil && *p.Index > maxIdx { - maxIdx = *p.Index - } - } - - return maxIdx -} - -func combine(cell, val string) string { - if len(cell) > 0 { - return fmt.Sprintf("%s/%s", cell, val) - } else { - return val - } -} - -func (e *Edge) String() string { - cells := make([]string, e.maxIndex()+1) - if e.SrcVID != nil && e.SrcVID.Index != nil { - cells[*e.SrcVID.Index] = combine(cells[*e.SrcVID.Index], e.SrcVID.String(base.LABEL_SRC_VID)) - } - if e.DstVID != nil && e.DstVID.Index != nil { - cells[*e.DstVID.Index] = combine(cells[*e.DstVID.Index], e.DstVID.String(base.LABEL_DST_VID)) - } - if e.Rank != nil && e.Rank.Index != nil { - cells[*e.Rank.Index] = combine(cells[*e.Rank.Index], base.LABEL_RANK) - } - for _, prop := range e.Props { - if prop.Index != nil { - cells[*prop.Index] = combine(cells[*prop.Index], prop.String(*e.Name)) - } - } - for i := range cells { - if cells[i] == "" { - cells[i] = base.LABEL_IGNORE - } - } - return strings.Join(cells, ",") -} - -func (e *Edge) validateAndReset(prefix string) error { - if e.Name == nil { - return fmt.Errorf("Please configure edge name in: %s.name", prefix) - } - - if e.SrcVID == nil { - index := 0 - e.SrcVID = &VID{Index: &index, Type: &kDefaultVidType} - } - if err := e.SrcVID.validateAndReset(fmt.Sprintf("%s.srcVID", prefix), 0); err != nil { - return err - } - - if e.DstVID == nil { - index := 1 - e.DstVID = &VID{Index: &index, Type: &kDefaultVidType} - } - if err := e.DstVID.validateAndReset(fmt.Sprintf("%s.dstVID", prefix), 1); err != nil { - return err - } - - start := 2 - if e.Rank != nil { - if err := e.Rank.validateAndReset(fmt.Sprintf("%s.rank", prefix), 2); err != nil { - return err - } - start++ - } else { - if e.WithRanking != nil && *e.WithRanking { - index := 2 - e.Rank = &Rank{Index: &index} - start++ - } - } - for i := range e.Props { - if e.Props[i] != nil { - if err := e.Props[i].validateAndReset(fmt.Sprintf("%s.prop[%d]", prefix, i), i+start); err != nil { - return err - } - } else { - logger.Log.Errorf("prop %d of edge %s is nil", i, *e.Name) - } - } - return nil -} - -func (v *Vertex) FormatValues(record base.Record) (string, error) { - var cells []string - for _, tag := range 
v.Tags { - str, noProps, err := tag.FormatValues(record) - if err != nil { - return "", err - } - if !noProps { - cells = append(cells, str) - } - } - vid, err := v.VID.FormatValue(record) - if err != nil { - return "", err - } - return fmt.Sprintf(" %s: (%s)", vid, strings.Join(cells, ",")), nil -} - -func (v *Vertex) maxIndex() int { - maxIdx := 0 - if v.VID != nil && v.VID.Index != nil && *v.VID.Index > maxIdx { - maxIdx = *v.VID.Index - } - for _, tag := range v.Tags { - if tag != nil { - for _, prop := range tag.Props { - if prop != nil && prop.Index != nil && *prop.Index > maxIdx { - maxIdx = *prop.Index - } - } - } - } - - return maxIdx -} - -func (v *Vertex) String() string { - cells := make([]string, v.maxIndex()+1) - if v.VID != nil && v.VID.Index != nil { - cells[*v.VID.Index] = v.VID.String(base.LABEL_VID) - } - for _, tag := range v.Tags { - for _, prop := range tag.Props { - if prop != nil && prop.Index != nil { - cells[*prop.Index] = combine(cells[*prop.Index], prop.String(*tag.Name)) - } - } - } - - for i := range cells { - if cells[i] == "" { - cells[i] = base.LABEL_IGNORE - } - } - return strings.Join(cells, ",") -} - -func (v *Vertex) validateAndReset(prefix string) error { - // if v.Tags == nil { - // return fmt.Errorf("Please configure %.tags", prefix) - // } - if v.VID == nil { - index := 0 - v.VID = &VID{Index: &index, Type: &kDefaultVidType} - } - if err := v.VID.validateAndReset(fmt.Sprintf("%s.vid", prefix), 0); err != nil { - return err - } - j := 1 - for i := range v.Tags { - if v.Tags[i] != nil { - if err := v.Tags[i].validateAndReset(fmt.Sprintf("%s.tags[%d]", prefix, i), j); err != nil { - return err - } - j = j + len(v.Tags[i].Props) - } else { - logger.Log.Errorf("tag %d is nil", i) - } - } - return nil -} - -func (p *Prop) IsStringType() bool { - return strings.ToLower(*p.Type) == "string" -} - -func (p *Prop) IsDateOrTimeType() bool { - t := strings.ToLower(*p.Type) - return t == "date" || t == "time" || t == "datetime" || t == "timestamp" -} - -func (p *Prop) IsTimestampType() bool { - t := strings.ToLower(*p.Type) - return t == "timestamp" -} - -func (p *Prop) IsGeographyType() bool { - t := strings.ToLower(*p.Type) - return t == "geography" || t == "geography(point)" || t == "geography(linestring)" || t == "geography(polygon)" -} - -func (p *Prop) FormatValue(record base.Record) (string, error) { - value, err := p.picker.Pick(record) - if err != nil { - return "", err - } - return value.Val, nil -} - -func (p *Prop) getValue(record base.Record) (string, bool, error) { - if p.Index != nil && *p.Index >= len(record) { - return "", false, fmt.Errorf("Prop index %d out range %d of record(%v)", *p.Index, len(record), record) - } - - r := record[*p.Index] - if !p.Nullable { - return r, false, nil - } - - if r != p.NullValue { - return r, false, nil - } - - for _, idx := range p.AlternativeIndices { - if idx >= len(record) { - return "", false, fmt.Errorf("Prop index %d out range %d of record(%v)", idx, len(record), record) - } - r = record[idx] - if r != p.NullValue { - return r, false, nil - } - } - if p.DefaultValue != nil { - return *p.DefaultValue, false, nil - } - return dbNULL, true, nil -} - -func (p *Prop) String(prefix string) string { - return fmt.Sprintf("%s.%s:%s", prefix, *p.Name, *p.Type) -} - -func (p *Prop) validateAndReset(prefix string, val int) error { - *p.Type = strings.ToLower(*p.Type) - if !base.IsValidType(*p.Type) { - return fmt.Errorf("Error property type of %s.type: %s", prefix, *p.Type) - } - if p.Index == nil { - p.Index = &val - 
} else { - if *p.Index < 0 { - return fmt.Errorf("Invalid prop index: %d, name: %s, type: %s", *p.Index, *p.Name, *p.Type) - } - } - return p.InitPicker() -} - -func (p *Prop) InitPicker() error { - pickerConfig := picker.Config{ - Indices: []int{*p.Index}, - Type: *p.Type, - } - - if p.Nullable { - pickerConfig.Nullable = func(s string) bool { - return s == p.NullValue - } - pickerConfig.NullValue = dbNULL - if len(p.AlternativeIndices) > 0 { - pickerConfig.Indices = append(pickerConfig.Indices, p.AlternativeIndices...) - } - pickerConfig.DefaultValue = p.DefaultValue - } - - var err error - p.picker, err = pickerConfig.Build() - return err -} - -func (t *Tag) FormatValues(record base.Record) (string, bool, error) { - var cells []string - noProps := true - for _, p := range t.Props { - if p == nil { - continue - } - noProps = false - if c, err := p.FormatValue(record); err != nil { - return "", noProps, fmt.Errorf("tag: %v, error: %v", *t, err) - } else { - cells = append(cells, c) - } - } - return strings.Join(cells, ","), noProps, nil -} - -func (t *Tag) validateAndReset(prefix string, start int) error { - if t.Name == nil { - return fmt.Errorf("Please configure the vertex tag name in: %s.name", prefix) - } - - for i := range t.Props { - if t.Props[i] != nil { - if err := t.Props[i].validateAndReset(fmt.Sprintf("%s.props[%d]", prefix, i), i+start); err != nil { - return err - } - } else { - logger.Log.Errorf("prop %d of tag %s is nil", i, *t.Name) - } + return nil, err } - return nil + defer f.Close() + return FromReader(f) } diff --git a/pkg/config/config_suite_test.go b/pkg/config/config_suite_test.go new file mode 100644 index 00000000..6e598893 --- /dev/null +++ b/pkg/config/config_suite_test.go @@ -0,0 +1,13 @@ +package config + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestConfig(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg config Suite") +} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 8e2a5fac..9f08e8b6 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -1,1192 +1,106 @@ package config import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" - "testing" - "text/template" + "bytes" + stderrors "errors" - "github.com/stretchr/testify/assert" - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "gopkg.in/yaml.v2" + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + configv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/config/v3" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "gopkg.in/yaml.v3" ) -func TestYAMLParser(t *testing.T) { - runnerLogger := logger.NewRunnerLogger("") - yamlConfig, err := Parse("../../examples/v2/example.yaml", runnerLogger) - if err != nil { - t.Fatal(err) - } - - for _, file := range yamlConfig.Files { - if strings.ToLower(*file.Type) != "csv" { - t.Fatal("Error file type") - } - switch strings.ToLower(*file.Schema.Type) { - case "edge": - case "vertex": - if file.Schema.Vertex == nil { - continue - } - if len(file.Schema.Vertex.Tags) == 0 && !*file.CSV.WithHeader { - t.Fatal("Empty tags in vertex") - } - default: - t.Fatal("Error schema type") - } - } -} - -type testYAML struct { - Version *string `yaml:"version"` - Files *[]struct { - Path *string `yaml:"path"` - } `yaml:"files"` -} - -var yamlStr string = ` -version: beta -files: - - path: ./file.csv -` - -func TestTypePointer(t *testing.T) { - ty := testYAML{} - if err := yaml.Unmarshal([]byte(yamlStr), &ty); err != nil { - t.Fatal(err) - } - t.Logf("yaml: %v, %v", *ty.Version, *ty.Files) -} - -var jsonStr = ` -{ - "version": "beta", - "files": [ - { "path": "/tmp" }, - { "path": "/etc" } - ] -} -` - -func TestJsonInYAML(t *testing.T) { - conf := YAMLConfig{} - if err := yaml.Unmarshal([]byte(jsonStr), &conf); err != nil { - t.Fatal(err) - } - - if conf.Version == nil || *conf.Version != "beta" { - t.Fatal("Error version") - } - - if conf.Files == nil || len(conf.Files) != 2 { - t.Fatal("Error files") - } - - paths := []string{"/tmp", "/etc"} - for i, p := range paths { - f := conf.Files[i] - if f == nil || f.Path == nil || *f.Path != p { - t.Fatalf("Error file %d path", i) - } - } -} - -type Person struct { - Name string `json:"name"` -} - -type Man struct { - Person - Age int `json:"age"` -} - -func TestJsonTypeEmbeding(t *testing.T) { - man := Man{ - Person: Person{Name: "zhangsan"}, - Age: 18, - } - t.Logf("%v", man) - b, _ := json.Marshal(man) - t.Logf("%s", string(b)) -} - -func TestParseVersion(t *testing.T) { - testcases := []struct { - version string - isError bool - }{ - { - version: "version: v1rc1", - isError: false, - }, - { - version: "version: v1rc2", - isError: false, - }, - { - version: "version: v1", - isError: false, - }, - { - version: "version: v2", - isError: false, - }, - { - version: "", - isError: true, - }, - { - version: "version: vx", - isError: true, - }, - } - - for _, tc := range testcases { - t.Run(tc.version, func(t *testing.T) { - ast := assert.New(t) - - tmpl, err := template.ParseFiles("testdata/test-parse-version.yaml") - ast.NoError(err) - - f, err := os.CreateTemp("testdata", ".test-parse-version.yaml") - ast.NoError(err) - filename := f.Name() - defer func() { - _ = f.Close() - _ = os.Remove(filename) - }() - - err = tmpl.ExecuteTemplate(f, "test-parse-version.yaml", map[string]string{ - "Version": tc.version, - }) - ast.NoError(err) - - _, err = Parse(filename, logger.NewRunnerLogger("")) - if tc.isError { - ast.Error(err) - } else { - ast.NoError(err) - } - }) - } -} - -func TestParseAfterPeriod(t *testing.T) { - testcases := []struct { - afterPeriod string - isError bool - }{ - { - afterPeriod: "", - isError: false, - }, - { - afterPeriod: "afterPeriod: 1s", - isError: false, - }, - { - afterPeriod: "afterPeriod: 1m", - isError: false, - }, - { - afterPeriod: "afterPeriod: 3m4s", - isError: false, - }, - { - afterPeriod: "afterPeriod: 1ss", - isError: true, - }, - } - - for _, tc := range testcases { - t.Run(tc.afterPeriod, func(t *testing.T) { - ast := assert.New(t) - - tmpl, err := 
template.ParseFiles("testdata/test-parse-after-period.yaml") - ast.NoError(err) - - f, err := os.CreateTemp("testdata", ".test-parse-after-period.yaml") - ast.NoError(err) - filename := f.Name() - defer func() { - _ = f.Close() - _ = os.Remove(filename) - }() - - err = tmpl.ExecuteTemplate(f, "test-parse-after-period.yaml", map[string]string{ - "AfterPeriod": tc.afterPeriod, - }) - ast.NoError(err) - - _, err = Parse(filename, logger.NewRunnerLogger("")) - if tc.isError { - ast.Error(err) - } else { - ast.NoError(err) - } - }) - } -} - -func TestParseLogPath(t *testing.T) { - tmpdir, err := os.MkdirTemp("", "test") - assert.NoError(t, err) - defer os.RemoveAll(tmpdir) - - testcases := []struct { - logPath string - isRelative bool - clean func() - }{ - { - logPath: "", - }, - { - logPath: "logPath: ./nebula-importer.log", - isRelative: true, - }, - { - logPath: "logPath: ./not-exists/nebula-importer.log", - isRelative: true, - }, - { - logPath: fmt.Sprintf("logPath: %s/nebula-importer.log", tmpdir), - }, - { - logPath: fmt.Sprintf("logPath: %s/not-exists/nebula-importer.log", tmpdir), - }, - } - - for _, tc := range testcases { - t.Run(tc.logPath, func(t *testing.T) { - ast := assert.New(t) - - tmpl, err := template.ParseFiles("testdata/test-parse-log-path.yaml") - ast.NoError(err) - - f, err := os.CreateTemp("testdata", ".test-parse-log-path.yaml") - ast.NoError(err) - filename := f.Name() - defer func() { - _ = f.Close() - _ = os.Remove(filename) - }() - - err = tmpl.ExecuteTemplate(f, "test-parse-log-path.yaml", map[string]string{ - "LogPath": tc.logPath, - }) - ast.NoError(err) - - c, err := Parse(filename, logger.NewRunnerLogger("")) - ast.NoError(err) - ast.NotNil(c.LogPath) - ast.Truef(filepath.IsAbs(*c.LogPath), "%s is abs path", *c.LogPath) - - logContent := []string{"first log", "second log"} - for i, s := range logContent { - runnerLogger := logger.NewRunnerLogger(*c.LogPath) - ast.FileExists(*c.LogPath) - runnerLogger.Error(s) - - // test first create and append - for j := 0; j <= i; j++ { - content, err := os.ReadFile(*c.LogPath) - ast.NoError(err) - ast.Contains(string(content), logContent[i]) - } - } - - if tc.isRelative { - removePath := *c.LogPath - if strings.Contains(*c.LogPath, "/not-exists/") { - removePath = filepath.Dir(removePath) - } - _ = os.RemoveAll(removePath) - } +var _ = Describe("FromFile", func() { + It("successfully v3", func() { + c1, err := FromFile("testdata/nebula-importer.v3.yaml") + Expect(err).NotTo(HaveOccurred()) + + cv3, ok := c1.(*configv3.Config) + Expect(ok).To(BeTrue()) + Expect(cv3).NotTo(BeNil()) + + Expect(cv3.Client.Version).To(Equal(configbase.ClientVersion3)) + Expect(cv3.Log.Files).To(HaveLen(1)) + Expect(cv3.Manager.GraphName).To(Equal("graphName")) + Expect(cv3.Manager.GraphName).To(Equal("graphName")) + Expect(cv3.Sources).To(HaveLen(3)) + Expect(cv3.Sources[0].Nodes).To(HaveLen(2)) + Expect(cv3.Sources[0].Edges).To(HaveLen(0)) + Expect(cv3.Sources[1].Nodes).To(HaveLen(0)) + Expect(cv3.Sources[1].Edges).To(HaveLen(2)) + Expect(cv3.Sources[2].Nodes).To(HaveLen(2)) + Expect(cv3.Sources[2].Edges).To(HaveLen(2)) + + content, err := yaml.Marshal(c1) + Expect(err).NotTo(HaveOccurred()) + Expect(content).NotTo(BeEmpty()) + + c2, err := FromBytes(content) + Expect(err).NotTo(HaveOccurred()) + Expect(c2).To(Equal(c1)) + + c3, err := FromReader(bytes.NewReader(content)) + Expect(err).NotTo(HaveOccurred()) + Expect(c3).To(Equal(c1)) + }) + + It("configuration file not exists", func() { + c, err := FromFile("testdata/not-exists.yaml") + 
Expect(err).To(HaveOccurred()) + Expect(c).To(BeNil()) + }) +}) + +var _ = Describe("FromBytes", func() { + It("Unmarshal failed 1", func() { + c, err := FromBytes([]byte(` +client: + version: : v + +`)) + Expect(err).To(HaveOccurred()) + Expect(c).To(BeNil()) + }) + + It("Unmarshal failed 2", func() { + c, err := FromBytes([]byte(` +client: + version: v3 +log: + files: "" +`)) + Expect(err).To(HaveOccurred()) + Expect(c).To(BeNil()) + }) + + It("unsupported client version failed", func() { + c, err := FromBytes([]byte(` +client: + version: v +`)) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedClientVersion)).To(BeTrue()) + Expect(c).To(BeNil()) + }) +}) + +type testErrorReader struct { + err error +} + +func (r testErrorReader) Read([]byte) (n int, err error) { + return 0, r.err +} + +var _ = Describe("FromBytes", func() { + It("Unmarshal failed 1", func() { + c, err := FromReader(testErrorReader{ + err: stderrors.New("read failed"), }) - } -} - -func TestParseConcatItems(t *testing.T) { - testcases := []struct { - concatItems string - fnCheck func(ast *assert.Assertions, concatItems []interface{}) - }{ - { - concatItems: "", - fnCheck: func(ast *assert.Assertions, concatItems []interface{}) { - ast.Len(concatItems, 0) - }, - }, - { - concatItems: "concatItems: [\"c1\"]", - fnCheck: func(ast *assert.Assertions, concatItems []interface{}) { - if ast.Len(concatItems, 1) { - ast.Equal(concatItems[0], "c1") - } - }, - }, - { - concatItems: "concatItems: [3]", - fnCheck: func(ast *assert.Assertions, concatItems []interface{}) { - if ast.Len(concatItems, 1) { - ast.Equal(concatItems[0], 3) - } - }, - }, - { - concatItems: "concatItems: [3, \"c1\", 1, \"c2\", 2]", - fnCheck: func(ast *assert.Assertions, concatItems []interface{}) { - if ast.Len(concatItems, 5) { - ast.Equal(concatItems[0], 3) - ast.Equal(concatItems[1], "c1") - ast.Equal(concatItems[2], 1) - ast.Equal(concatItems[3], "c2") - ast.Equal(concatItems[4], 2) - } - }, - }, - { - concatItems: "concatItems: [\"c1\", 3, \"c2\", 1, \"2\"]", - fnCheck: func(ast *assert.Assertions, concatItems []interface{}) { - if ast.Len(concatItems, 5) { - ast.Equal(concatItems[0], "c1") - ast.Equal(concatItems[1], 3) - ast.Equal(concatItems[2], "c2") - ast.Equal(concatItems[3], 1) - ast.Equal(concatItems[4], "2") - } - }, - }, - } - - for _, tc := range testcases { - t.Run(tc.concatItems, func(t *testing.T) { - ast := assert.New(t) - - tmpl, err := template.ParseFiles("testdata/test-parse-concat-items.yaml") - ast.NoError(err) - - f, err := os.CreateTemp("testdata", ".test-parse-concat-items.yaml") - ast.NoError(err) - filename := f.Name() - defer func() { - _ = f.Close() - _ = os.Remove(filename) - }() - - err = tmpl.ExecuteTemplate(f, "test-parse-concat-items.yaml", map[string]string{ - "ConcatItems": tc.concatItems, - }) - ast.NoError(err) - - c, err := Parse(filename, logger.NewRunnerLogger("")) - if ast.NoError(err) { - tc.fnCheck(ast, c.Files[0].Schema.Edge.SrcVID.ConcatItems) - } - }) - } -} - -func TestParseNoFiles(t *testing.T) { - _, err := Parse("./testdata/test-parse-no-files.yaml", logger.NewRunnerLogger("")) - assert.Error(t, err) - assert.Contains(t, err.Error(), "no files") -} - -func TestVidType(t *testing.T) { - testcases := []struct { - typ string - isSupport bool - }{ - { - typ: "int", - isSupport: true, - }, - { - typ: "INT", - isSupport: true, - }, - { - typ: "iNt", - isSupport: true, - }, - { - typ: " iNt ", - isSupport: true, - }, - { - typ: "string", - isSupport: true, - }, - { - typ: "aaa", - 
isSupport: false, - }, - } - - for _, tc := range testcases { - t.Run(tc.typ, func(t *testing.T) { - ast := assert.New(t) - vid := VID{ - Type: &tc.typ, - } - err := vid.validateAndReset("", 0) - if tc.isSupport { - ast.NoError(err) - } else { - ast.Error(err) - ast.Contains(err.Error(), "vid type must be") - } - }) - } -} - -func TestVidFormatValue(t *testing.T) { - var ( - idx0 = 0 - idx1 = 1 - fHash = "hash" - tInt = "int" - tString = "string" - prefix = "p_" - ) - testcases := []struct { - name string - vid VID - record base.Record - want string - wantErrString string - }{ - { - name: "index out of range", - vid: VID{ - Index: &idx1, - Type: &tString, - }, - want: "", - record: base.Record{""}, - wantErrString: "out range", - }, - { - name: "type string", - vid: VID{ - Index: &idx0, - Type: &tString, - }, - record: base.Record{"str"}, - want: "\"str\"", - }, - { - name: "type int", - vid: VID{ - Index: &idx0, - Type: &tInt, - }, - record: base.Record{"1"}, - want: "1", - }, - { - name: "type int d", - vid: VID{ - Index: &idx0, - Type: &tInt, - }, - record: base.Record{"1"}, - want: "1", - }, - { - name: "type int 0d", - vid: VID{ - Index: &idx1, - Type: &tInt, - }, - record: base.Record{"", "070"}, - want: "070", - }, - { - name: "type int 0x", - vid: VID{ - Index: &idx0, - Type: &tInt, - }, - record: base.Record{"0x0F"}, - want: "0x0F", - }, - { - name: "type int 0X", - vid: VID{ - Index: &idx0, - Type: &tInt, - }, - record: base.Record{"0XF0"}, - want: "0XF0", - }, - { - name: "type int format err", - vid: VID{ - Index: &idx0, - Type: &tInt, - }, - record: base.Record{"F0"}, - want: "", - wantErrString: "Invalid vid format", - }, - { - name: "function hash", - vid: VID{ - Index: &idx0, - Type: &tString, - Function: &fHash, - }, - record: base.Record{"str"}, - want: "hash(\"str\")", - }, - { - name: "prefix", - vid: VID{ - Index: &idx0, - Type: &tString, - Prefix: &prefix, - }, - record: base.Record{"str"}, - want: prefix + "str", - }, - } - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ast := assert.New(t) - - ast.NoError(tc.vid.InitPicker()) - - str, err := tc.vid.FormatValue(tc.record) - if tc.wantErrString != "" { - ast.Error(err) - ast.Contains(err.Error(), tc.wantErrString) - } else { - ast.NoError(err) - ast.Contains(str, tc.want) - } - }) - } -} - -func TestPropType(t *testing.T) { - testcases := []struct { - typ string - isSupport bool - }{ - { - typ: "int", - isSupport: true, - }, - { - typ: "INT", - isSupport: true, - }, - { - typ: "iNt", - isSupport: true, - }, - { - typ: "string", - isSupport: true, - }, - { - typ: "float", - isSupport: true, - }, - { - typ: "double", - isSupport: true, - }, - { - typ: "bool", - isSupport: true, - }, - { - typ: "date", - isSupport: true, - }, - { - typ: "time", - isSupport: true, - }, - { - typ: "datetime", - isSupport: true, - }, - { - typ: "timestamp", - isSupport: true, - }, - { - typ: "geography", - isSupport: true, - }, - { - typ: "geography(point)", - isSupport: true, - }, - { - typ: "geography(linestring)", - isSupport: true, - }, - { - typ: "geography(polygon)", - isSupport: true, - }, - { - typ: "aaa", - isSupport: false, - }, - } - - for _, tc := range testcases { - t.Run(tc.typ, func(t *testing.T) { - ast := assert.New(t) - prop := Prop{ - Type: &tc.typ, - } - err := prop.validateAndReset("", 0) - if tc.isSupport { - ast.NoError(err) - } else { - ast.Error(err) - ast.Contains(err.Error(), "Error property type") - } - }) - } -} - -func TestPropFormatValue(t *testing.T) { - var ( - idx0 = 0 - idx1 = 1 - 
vZero = "0" - tBool = "bool" - tInt = "int" - tFloat = "float" - tDouble = "double" - tString = "string" - tTime = "time" - tTimestamp = "timestamp" - tDate = "date" - tDatetime = "datetime" - tGeography = "geography" - tGeographyPoint = "geography(point)" - tGeographyLineString = "geography(linestring)" - tGeographyPolygon = "geography(polygon)" - ) - - testcases := []struct { - name string - prop Prop - record base.Record - want string - wantErrString string - }{ - { - name: "index out of range", - prop: Prop{ - Index: &idx1, - Type: &tString, - }, - want: "", - record: base.Record{""}, - wantErrString: "out range", - }, - { - name: "type bool", - prop: Prop{ - Index: &idx0, - Type: &tBool, - }, - record: base.Record{"false"}, - want: "false", - }, - { - name: "type bool null", - prop: Prop{ - Index: &idx0, - Type: &tBool, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type int", - prop: Prop{ - Index: &idx0, - Type: &tInt, - }, - record: base.Record{"1"}, - want: "1", - }, - { - name: "type int null", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type float", - prop: Prop{ - Index: &idx0, - Type: &tFloat, - }, - record: base.Record{"1.1"}, - want: "1.1", - }, - { - name: "type float null", - prop: Prop{ - Index: &idx0, - Type: &tFloat, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type double", - prop: Prop{ - Index: &idx0, - Type: &tDouble, - }, - record: base.Record{"2.2"}, - want: "2.2", - }, - { - name: "type double null", - prop: Prop{ - Index: &idx0, - Type: &tDouble, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type string", - prop: Prop{ - Index: &idx0, - Type: &tString, - }, - record: base.Record{"str"}, - want: "\"str\"", - }, - { - name: "type string null", - prop: Prop{ - Index: &idx0, - Type: &tString, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type string null value", - prop: Prop{ - Index: &idx0, - Type: &tString, - Nullable: true, - NullValue: "__NULL__", - }, - record: base.Record{"__NULL__"}, - want: dbNULL, - }, - { - name: "type time", - prop: Prop{ - Index: &idx0, - Type: &tTime, - }, - record: base.Record{"18:38:23.284"}, - want: "TIME(\"18:38:23.284\")", - }, - { - name: "type time null", - prop: Prop{ - Index: &idx0, - Type: &tTime, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type timestamp", - prop: Prop{ - Index: &idx0, - Type: &tTimestamp, - }, - record: base.Record{"2020-01-11T19:28:23"}, - want: "TIMESTAMP(\"2020-01-11T19:28:23\")", - }, - { - name: "type timestamp integer", - prop: Prop{ - Index: &idx0, - Type: &tTimestamp, - }, - record: base.Record{"1578770903"}, - want: "TIMESTAMP(1578770903)", - }, - { - name: "type timestamp integer", - prop: Prop{ - Index: &idx0, - Type: &tTimestamp, - }, - record: base.Record{"0123"}, - want: "TIMESTAMP(0123)", - }, - { - name: "type timestamp integer", - prop: Prop{ - Index: &idx0, - Type: &tTimestamp, - }, - record: base.Record{"0XF0"}, - want: "TIMESTAMP(0XF0)", - }, - { - name: "type timestamp null", - prop: Prop{ - Index: &idx0, - Type: &tTimestamp, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type date", - prop: Prop{ - Index: &idx0, - Type: &tDate, - }, - record: base.Record{"2020-01-02"}, - want: "DATE(\"2020-01-02\")", - }, - { - name: "type date null", - prop: Prop{ - Index: &idx0, - Type: 
&tDate, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type datetime", - prop: Prop{ - Index: &idx0, - Type: &tDatetime, - }, - record: base.Record{"2020-01-11T19:28:23.284"}, - want: "DATETIME(\"2020-01-11T19:28:23.284\")", - }, - { - name: "type datetime null", - prop: Prop{ - Index: &idx0, - Type: &tDatetime, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type geography", - prop: Prop{ - Index: &idx0, - Type: &tGeography, - }, - record: base.Record{"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))"}, - want: "ST_GeogFromText(\"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))\")", - }, - { - name: "type geography null", - prop: Prop{ - Index: &idx0, - Type: &tGeography, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type geography(point)", - prop: Prop{ - Index: &idx0, - Type: &tGeographyPoint, - }, - record: base.Record{"Point(0.0 0.0)"}, - want: "ST_GeogFromText(\"Point(0.0 0.0)\")", - }, - { - name: "type geography(point) null", - prop: Prop{ - Index: &idx0, - Type: &tGeographyPoint, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type geography(linestring)", - prop: Prop{ - Index: &idx0, - Type: &tGeographyLineString, - }, - record: base.Record{"linestring(0 1, 179.99 89.99)"}, - want: "ST_GeogFromText(\"linestring(0 1, 179.99 89.99)\")", - }, - { - name: "type geography(linestring) null", - prop: Prop{ - Index: &idx0, - Type: &tGeographyLineString, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "type geography(polygon)", - prop: Prop{ - Index: &idx0, - Type: &tGeographyPolygon, - }, - record: base.Record{"polygon((0 1, 2 4, 3 5, 4 9, 0 1))"}, - want: "ST_GeogFromText(\"polygon((0 1, 2 4, 3 5, 4 9, 0 1))\")", - }, - { - name: "type geography(polygon) null", - prop: Prop{ - Index: &idx0, - Type: &tGeographyPolygon, - Nullable: true, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "alternative indices 0", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{}, - }, - record: base.Record{""}, - want: dbNULL, - }, - { - name: "alternative indices 1 out range", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{1}, - }, - record: base.Record{""}, - wantErrString: "out range", - }, - { - name: "alternative indices 1 use index", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{1}, - }, - record: base.Record{"1"}, - want: "1", - }, - { - name: "alternative indices 1 null", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{1}, - }, - record: base.Record{"", ""}, - want: dbNULL, - }, - { - name: "alternative indices 1 not null", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{2}, - }, - record: base.Record{"", "1", "2"}, - want: "2", - }, - { - name: "alternative indices n not null", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{3, 2, 1}, - }, - record: base.Record{"", "1", "2", ""}, - want: "2", - }, - { - name: "default value not nullable", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: false, - AlternativeIndices: []int{1}, - DefaultValue: &vZero, - }, - record: base.Record{"", "1", "2"}, - want: "", - }, - { - name: "default value nullable", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - DefaultValue: &vZero, 
- }, - record: base.Record{""}, - want: "0", - }, - { - name: "default value nullable alternative indices", - prop: Prop{ - Index: &idx0, - Type: &tInt, - Nullable: true, - AlternativeIndices: []int{1, 2, 3, 4, 5, 6}, - DefaultValue: &vZero, - }, - record: base.Record{"", "", "", "", "", "", ""}, - want: "0", - }, - } - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ast := assert.New(t) - - ast.NoError(tc.prop.InitPicker()) - - str, err := tc.prop.FormatValue(tc.record) - if tc.wantErrString != "" { - ast.Error(err) - ast.Contains(err.Error(), tc.wantErrString) - } else { - ast.NoError(err) - ast.Equal(str, tc.want) - } - }) - } -} - -func TestParseFunction(t *testing.T) { - var ( - tString = "string" - tInt = "int" - fHash = "hash" - prefix = "prefix" - ) - testcases := []struct { - str string - vid VID - isSupport bool - }{ - { - str: ":VID", - vid: VID{ - Type: &tString, - }, - isSupport: true, - }, - { - str: ":VID(string)", - vid: VID{ - Type: &tString, - }, - isSupport: true, - }, - { - str: ":VID(int)", - vid: VID{ - Type: &tInt, - }, - isSupport: true, - }, - { - str: ":VID(hash+int)", - vid: VID{ - Function: &fHash, - Type: &tInt, - }, - isSupport: true, - }, - { - str: ":VID(hash+int+prefix)", - vid: VID{ - Function: &fHash, - Type: &tInt, - Prefix: &prefix, - }, - isSupport: true, - }, - { - str: ":VID(", - isSupport: false, - }, - { - str: ":VID)int(", - isSupport: false, - }, - } - - for _, tc := range testcases { - t.Run(tc.str, func(t *testing.T) { - ast := assert.New(t) - vid := VID{} - err := vid.ParseFunction(tc.str) - if tc.isSupport { - ast.NoError(err) - ast.Equal(vid, tc.vid) - } else { - ast.Error(err) - ast.Contains(err.Error(), "Invalid function format") - } - }) - } -} - -func Benchmark_checkVidFormat(b *testing.B) { - for i := 0; i < b.N; i++ { - _ = checkVidFormat("-0xfedcba9876543210", true) - _ = checkVidFormat("-076543210", true) - _ = checkVidFormat("-9876543210", true) - _ = checkVidFormat("hash(\"abcdefg\")", true) - } -} + Expect(err).To(HaveOccurred()) + Expect(c).To(BeNil()) + }) +}) diff --git a/pkg/config/testdata/nebula-importer.v3.yaml b/pkg/config/testdata/nebula-importer.v3.yaml new file mode 100644 index 00000000..43673fd6 --- /dev/null +++ b/pkg/config/testdata/nebula-importer.v3.yaml @@ -0,0 +1,143 @@ +client: + version: v3 + address: "127.0.0.1:0" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: graphName + batch: 100 + readerConcurrency: 10 + importerConcurrency: 10 + statsInterval: 10s + hooks: + before: + - statements: + - statement1 + - statements: + - statement2 + wait: 1ms + +sources: + - path: ./node1.csv + tags: + - name: node1 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + - name: node2 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + + - path: ./edge1.csv + edges: + - name: edge1 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "prop1" + type: "STRING" + index: 2 + - name: edge2 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + props: + - name: "prop1" + type: "STRING" + index: 2 + + - path: ./node1.csv + tags: + - name: node1 + id: + type: "INT" + index: 0 + props: + - name: 
"prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + - name: node2 + id: + type: "INT" + index: 0 + props: + - name: "prop1" + type: "STRING" + index: 1 + - name: "prop2" + type: "int" + index: 2 + - name: "prop3" + type: "Double" + index: 3 + edges: + - name: edge1 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + - name: edge2 + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + +log: + level: INFO + console: true + files: + - nebula-importer.log + fields: + - key: app + value: nebula-importer diff --git a/pkg/config/testdata/test-parse-after-period.yaml b/pkg/config/testdata/test-parse-after-period.yaml deleted file mode 100644 index bd060eb8..00000000 --- a/pkg/config/testdata/test-parse-after-period.yaml +++ /dev/null @@ -1,31 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test - connection: - user: root - password: nebula - address: 127.0.0.1:9669 - postStart: - commands: SHOW HOSTS - {{ .AfterPeriod }} -files: - - path: ../../../examples/v2/choose.csv - batchSize: 2 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: choose - withRanking: false - props: - - name: grade - type: int diff --git a/pkg/config/testdata/test-parse-concat-items.yaml b/pkg/config/testdata/test-parse-concat-items.yaml deleted file mode 100644 index f3c45c11..00000000 --- a/pkg/config/testdata/test-parse-concat-items.yaml +++ /dev/null @@ -1,31 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test - connection: - user: root - password: nebula - address: 127.0.0.1:9669 -files: - - path: ../../../examples/v2/choose.csv - batchSize: 2 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - srcVID: - {{ .ConcatItems }} - type: string - name: choose - withRanking: false - props: - - name: grade - type: int diff --git a/pkg/config/testdata/test-parse-log-path.yaml b/pkg/config/testdata/test-parse-log-path.yaml deleted file mode 100644 index f759f90b..00000000 --- a/pkg/config/testdata/test-parse-log-path.yaml +++ /dev/null @@ -1,29 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test - connection: - user: root - password: nebula - address: 127.0.0.1:9669 -{{ .LogPath }} -files: - - path: ../../../examples/v2/choose.csv - batchSize: 2 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: choose - withRanking: false - props: - - name: grade - type: int diff --git a/pkg/config/testdata/test-parse-no-files.yaml b/pkg/config/testdata/test-parse-no-files.yaml deleted file mode 100644 index 422d3082..00000000 --- a/pkg/config/testdata/test-parse-no-files.yaml +++ /dev/null @@ -1,12 +0,0 @@ -version: v2 -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test - connection: - user: root - password: nebula - address: 127.0.0.1:9669 diff --git a/pkg/config/testdata/test-parse-version.yaml b/pkg/config/testdata/test-parse-version.yaml deleted 
file mode 100644 index 58d3a312..00000000 --- a/pkg/config/testdata/test-parse-version.yaml +++ /dev/null @@ -1,28 +0,0 @@ -{{ .Version }} -description: example -removeTempFiles: false -clientSettings: - retry: 3 - concurrency: 2 # number of graph clients - channelBufferSize: 1 - space: importer_test - connection: - user: root - password: nebula - address: 127.0.0.1:9669 -files: - - path: ../../../examples/v2/choose.csv - batchSize: 2 - inOrder: false - type: csv - csv: - withHeader: false - withLabel: false - schema: - type: edge - edge: - name: choose - withRanking: false - props: - - name: grade - type: int diff --git a/pkg/config/v3/config.go b/pkg/config/v3/config.go new file mode 100644 index 00000000..30c70990 --- /dev/null +++ b/pkg/config/v3/config.go @@ -0,0 +1,112 @@ +package configv3 + +import ( + "fmt" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" +) + +var _ configbase.Configurator = (*Config)(nil) + +type ( + Client = configbase.Client + Log = configbase.Log + + Config struct { + Client `yaml:"client"` + Manager `yaml:"manager"` + Sources `yaml:"sources"` + *Log `yaml:"log,omitempty"` + + logger logger.Logger + pool client.Pool + mgr manager.Manager + } +) + +func (c *Config) Optimize(configPath string) error { + if err := c.Log.OptimizeFiles(configPath); err != nil { + return err + } + + if err := c.Sources.OptimizePath(configPath); err != nil { + return err + } + + //revive:disable-next-line:if-return + if err := c.Sources.OptimizePathWildCard(); err != nil { + return err + } + + return nil +} + +func (c *Config) Build() error { + var ( + err error + l logger.Logger + pool client.Pool + mgr manager.Manager + ) + defer func() { + if err != nil { + if pool != nil { + _ = pool.Close() + } + if l != nil { + _ = l.Close() + } + } + }() + + l, err = c.BuildLogger() + if err != nil { + return err + } + pool, err = c.BuildClientPool( + client.WithLogger(l), + client.WithClientInitFunc(c.clientInitFunc), + ) + if err != nil { + return err + } + mgr, err = c.Manager.BuildManager(l, pool, c.Sources, + manager.WithGetClientOptions(client.WithClientInitFunc(nil)), // clean the USE SPACE in 3.x + ) + if err != nil { + return err + } + + c.logger = l + c.pool = pool + c.mgr = mgr + + return nil +} + +func (c *Config) GetLogger() logger.Logger { + return c.logger +} + +func (c *Config) GetClientPool() client.Pool { + return c.pool +} + +func (c *Config) GetManager() manager.Manager { + return c.mgr +} + +func (c *Config) clientInitFunc(cli client.Client) error { + resp, err := cli.Execute(fmt.Sprintf("USE %s", utils.ConvertIdentifier(c.Manager.GraphName))) + if err != nil { + return err + } + if !resp.IsSucceed() { + return resp.GetError() + } + return nil +} diff --git a/pkg/config/v3/config_suite_test.go b/pkg/config/v3/config_suite_test.go new file mode 100644 index 00000000..c4036acf --- /dev/null +++ b/pkg/config/v3/config_suite_test.go @@ -0,0 +1,13 @@ +package configv3 + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func TestConfig(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg config v3 Suite") +} diff --git a/pkg/config/v3/config_test.go b/pkg/config/v3/config_test.go new file mode 100644 index 00000000..835a5857 --- /dev/null +++ b/pkg/config/v3/config_test.go @@ -0,0 +1,146 @@ +package configv3 + +import ( + stderrors "errors" + "path/filepath" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + specv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/v3" + + "github.com/golang/mock/gomock" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Config", func() { + Describe(".Optimize", func() { + It("c.Sources.OptimizePathWildCard failed", func() { + c := &Config{ + Sources: Sources{ + Source{ + Source: configbase.Source{ + SourceConfig: source.Config{ + Local: &source.LocalConfig{ + Path: "[a-b", + }, + }, + }, + }, + }, + } + Expect(c.Optimize(".")).To(HaveOccurred()) + }) + + It("successfully", func() { + c := &Config{ + Sources: Sources{ + Source{ + Source: configbase.Source{ + SourceConfig: source.Config{ + Local: &source.LocalConfig{ + Path: filepath.Join("testdata", "file*"), + }, + }, + }, + }, + }, + } + Expect(c.Optimize(".")).NotTo(HaveOccurred()) + }) + }) + + Describe(".Build", func() { + var c Config + BeforeEach(func() { + c = Config{ + Manager: Manager{ + GraphName: "graphName", + }, + Sources: Sources{ + { + Source: configbase.Source{ + SourceConfig: source.Config{ + Local: &source.LocalConfig{ + Path: filepath.Join("testdata", "file10"), + }, + }, + }, + Nodes: specv3.Nodes{ + &specv3.Node{ + Name: "n1", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + }, + }, + }, + } + }) + + It("BuildLogger failed", func() { + c.Log = &Log{ + Files: []string{filepath.Join("testdata", "not-exists", "1.log")}, + } + Expect(c.Build()).To(HaveOccurred()) + }) + + It("BuildClientPool failed", func() { + c.Client.Version = "v" + Expect(c.Build()).To(HaveOccurred()) + }) + + It("BuildManager failed", func() { + c.Manager.GraphName = "" + Expect(c.Build()).To(HaveOccurred()) + }) + + It("successfully", func() { + Expect(c.Build()).NotTo(HaveOccurred()) + Expect(c.GetLogger()).NotTo(BeNil()) + Expect(c.GetClientPool()).NotTo(BeNil()) + Expect(c.GetManager()).NotTo(BeNil()) + }) + }) +}) + +var _ = Describe("clientInitFunc", func() { + var ( + c Config + ctrl *gomock.Controller + mockClient *client.MockClient + mockResponse *client.MockResponse + ) + + BeforeEach(func() { + c.Manager.GraphName = "graphName" + ctrl = gomock.NewController(GinkgoT()) + mockClient = client.NewMockClient(ctrl) + mockResponse = client.NewMockResponse(ctrl) + }) + AfterEach(func() { + ctrl.Finish() + }) + + It("Execute failed", func() { + mockClient.EXPECT().Execute("USE `graphName`").Return(nil, stderrors.New("execute error")) + Expect(c.clientInitFunc(mockClient)).To(HaveOccurred()) + }) + + It("Execute IsSucceed false", func() { + mockClient.EXPECT().Execute("USE `graphName`").Return(mockResponse, nil) + mockResponse.EXPECT().IsSucceed().Return(false) + mockResponse.EXPECT().GetError().Return(stderrors.New("execute error")) + Expect(c.clientInitFunc(mockClient)).To(HaveOccurred()) + }) + + It("successfully", func() { + mockClient.EXPECT().Execute("USE `graphName`").Return(mockResponse, nil) + mockResponse.EXPECT().IsSucceed().Return(true) + 
Expect(c.clientInitFunc(mockClient)).NotTo(HaveOccurred()) + }) +}) diff --git a/pkg/config/v3/manager.go b/pkg/config/v3/manager.go new file mode 100644 index 00000000..8870a518 --- /dev/null +++ b/pkg/config/v3/manager.go @@ -0,0 +1,56 @@ +package configv3 + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" + "github.com/vesoft-inc/nebula-importer/v4/pkg/reader" +) + +type ( + Manager struct { + GraphName string `yaml:"spaceName"` + configbase.Manager `yaml:",inline"` + } +) + +func (m *Manager) BuildManager( + l logger.Logger, + pool client.Pool, + sources Sources, + opts ...manager.Option, +) (manager.Manager, error) { + options := make([]manager.Option, 0, 8+len(opts)) + options = append(options, + manager.WithClientPool(pool), + manager.WithBatch(m.Batch), + manager.WithReaderConcurrency(m.ReaderConcurrency), + manager.WithImporterConcurrency(m.ImporterConcurrency), + manager.WithStatsInterval(m.StatsInterval), + manager.WithBeforeHooks(m.Hooks.Before...), + manager.WithAfterHooks(m.Hooks.After...), + manager.WithLogger(l), + ) + options = append(options, opts...) + + mgr := manager.NewWithOpts(options...) + + for i := range sources { + s := sources[i] + src, brr, err := s.BuildSourceAndReader(reader.WithBatch(m.Batch), reader.WithLogger(l)) + if err != nil { + return nil, err + } + + importers, err := s.BuildImporters(m.GraphName, pool) + if err != nil { + return nil, err + } + if err = mgr.Import(src, brr, importers...); err != nil { + return nil, err + } + } + + return mgr, nil +} diff --git a/pkg/config/v3/manager_test.go b/pkg/config/v3/manager_test.go new file mode 100644 index 00000000..a6d165fc --- /dev/null +++ b/pkg/config/v3/manager_test.go @@ -0,0 +1,60 @@ +package configv3 + +import ( + "path/filepath" + + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + specv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/v3" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Manager", func() { + Describe(".BuildManager", func() { + var c Config + BeforeEach(func() { + c = Config{ + Manager: Manager{ + GraphName: "graphName", + }, + Sources: Sources{ + { + Source: configbase.Source{ + SourceConfig: source.Config{ + Local: &source.LocalConfig{ + Path: filepath.Join("testdata", "file10"), + }, + }, + }, + Nodes: specv3.Nodes{ + &specv3.Node{ + Name: "n1", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + }, + }, + }, + } + }) + + It("BuildImporters failed", func() { + c.Manager.GraphName = "" + Expect(c.Build()).To(HaveOccurred()) + }) + + It("Importer failed", func() { + c.Sources[0].SourceConfig.Local.Path = filepath.Join("testdata", "not-exists.csv") + Expect(c.Build()).To(HaveOccurred()) + }) + + It("successfully", func() { + Expect(c.Build()).NotTo(HaveOccurred()) + }) + }) +}) diff --git a/pkg/config/v3/source.go b/pkg/config/v3/source.go new file mode 100644 index 00000000..b69cbf00 --- /dev/null +++ b/pkg/config/v3/source.go @@ -0,0 +1,103 @@ +package configv3 + +import ( + "io/fs" + "os" + "path/filepath" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base" + "github.com/vesoft-inc/nebula-importer/v4/pkg/importer" + specv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/v3" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" +) + +type ( + Source struct { + configbase.Source `yaml:",inline"` + Nodes specv3.Nodes `yaml:"tags,omitempty"` + Edges specv3.Edges `yaml:"edges,omitempty"` + } + + Sources []Source +) + +func (s *Source) BuildGraph(graphName string, opts ...specv3.GraphOption) (*specv3.Graph, error) { + options := make([]specv3.GraphOption, 0, len(s.Nodes)+len(s.Edges)+len(opts)) + for i := range s.Nodes { + node := s.Nodes[i] + options = append(options, specv3.WithGraphNodes(node)) + } + for i := range s.Edges { + edge := s.Edges[i] + options = append(options, specv3.WithGraphEdges(edge)) + } + options = append(options, opts...) + graph := specv3.NewGraph(graphName, options...) 
+	graph.Complete()
+	if err := graph.Validate(); err != nil {
+		return nil, err
+	}
+	return graph, nil
+}
+
+func (s *Source) BuildImporters(graphName string, pool client.Pool) ([]importer.Importer, error) {
+	graph, err := s.BuildGraph(graphName)
+	if err != nil {
+		return nil, err
+	}
+	importers := make([]importer.Importer, 0, len(s.Nodes)+len(s.Edges))
+	for k := range s.Nodes {
+		node := s.Nodes[k]
+		builder := graph.InsertNodeBuilder(node)
+		i := importer.New(builder, pool)
+		importers = append(importers, i)
+	}
+
+	for k := range s.Edges {
+		edge := s.Edges[k]
+		builder := graph.InsertEdgeBuilder(edge)
+		i := importer.New(builder, pool)
+		importers = append(importers, i)
+	}
+	return importers, nil
+}
+
+// OptimizePath resolves relative paths against the directory of the configuration file
+func (ss Sources) OptimizePath(configPath string) error {
+	configPathDir := filepath.Dir(configPath)
+	for i := range ss {
+		if ss[i].SourceConfig.Local != nil {
+			ss[i].SourceConfig.Local.Path = utils.RelativePathBaseOn(configPathDir, ss[i].SourceConfig.Local.Path)
+		}
+	}
+	return nil
+}
+
+// OptimizePathWildCard expands wildcards in the paths, producing one Source per matched file
+func (ss *Sources) OptimizePathWildCard() error {
+	nss := make(Sources, 0, len(*ss))
+	for i := range *ss {
+		if (*ss)[i].SourceConfig.Local != nil {
+			paths, err := filepath.Glob((*ss)[i].SourceConfig.Local.Path)
+			if err != nil {
+				return err
+			}
+			if len(paths) == 0 {
+				return &os.PathError{Op: "open", Path: (*ss)[i].SourceConfig.Local.Path, Err: fs.ErrNotExist}
+			}
+
+			for _, path := range paths {
+				cpy := (*ss)[i]
+				cpySourceConfig := cpy.SourceConfig.Clone()
+				cpy.SourceConfig = *cpySourceConfig
+				cpy.SourceConfig.Local.Path = path
+				nss = append(nss, cpy)
+			}
+		} else {
+			nss = append(nss, (*ss)[i])
+		}
+	}
+	*ss = nss
+	return nil
+}
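For orientation, a minimal usage sketch of the two path optimizers above (not part of this patch; the config path and the glob are hypothetical). OptimizePath rewrites each local path relative to the config file's directory, and OptimizePathWildCard then fans a glob out into one Source per matched file, reporting fs.ErrNotExist when nothing matches:

    package main

    import (
        "fmt"

        configbase "github.com/vesoft-inc/nebula-importer/v4/pkg/config/base"
        configv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/config/v3"
        "github.com/vesoft-inc/nebula-importer/v4/pkg/source"
    )

    func main() {
        // A single glob entry; expansion yields one Source per matched file.
        ss := configv3.Sources{{
            Source: configbase.Source{
                SourceConfig: source.Config{
                    Local: &source.LocalConfig{Path: "data/node*.csv"}, // hypothetical glob
                },
            },
        }}
        // Resolve paths relative to the config file, then expand wildcards.
        if err := ss.OptimizePath("configs/importer.v3.yaml"); err != nil { // hypothetical path
            panic(err)
        }
        if err := ss.OptimizePathWildCard(); err != nil {
            panic(err) // fs.ErrNotExist when the glob matches nothing
        }
        for _, s := range ss {
            fmt.Println(s.SourceConfig.Local.Path) // e.g. configs/data/node1.csv
        }
    }

diff --git a/pkg/config/v3/source_test.go b/pkg/config/v3/source_test.go
new file mode 100644
index 00000000..19d6040f
--- /dev/null
+++ b/pkg/config/v3/source_test.go
@@ -0,0 +1,287 @@
+package configv3
+
+import (
+	"os"
+	"path/filepath"
+
+	"github.com/vesoft-inc/nebula-importer/v4/pkg/source"
+	specv3 "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/v3"
+
+	. "github.com/onsi/ginkgo/v2"
+	. 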
"github.com/onsi/gomega" +) + +var _ = Describe("Source", func() { + Describe(".BuildGraph", func() { + It("Validate failed", func() { + s := &Source{} + graph, err := s.BuildGraph("") + Expect(err).To(HaveOccurred()) + Expect(graph).To(BeNil()) + }) + + It("successfully", func() { + s := &Source{ + Nodes: specv3.Nodes{ + &specv3.Node{ + Name: "n1", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + &specv3.Node{ + Name: "n2", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + }, + Edges: specv3.Edges{ + &specv3.Edge{ + Name: "e1", + Src: &specv3.EdgeNodeRef{ + Name: "n1", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + Dst: &specv3.EdgeNodeRef{ + Name: "n2", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 1, + }, + }, + }, + }, + } + graph, err := s.BuildGraph("graphName") + Expect(err).NotTo(HaveOccurred()) + Expect(graph).NotTo(BeNil()) + }) + }) + + Describe(".BuildImporters", func() { + It("BuildGraph failed", func() { + s := &Source{} + importers, err := s.BuildImporters("", nil) + Expect(err).To(HaveOccurred()) + Expect(importers).To(BeNil()) + }) + + It("successfully", func() { + s := &Source{ + Nodes: specv3.Nodes{ + &specv3.Node{ + Name: "n1", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + &specv3.Node{ + Name: "n2", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 1, + }, + }, + }, + Edges: specv3.Edges{ + &specv3.Edge{ + Name: "e1", + Src: &specv3.EdgeNodeRef{ + Name: "n1", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 0, + }, + }, + Dst: &specv3.EdgeNodeRef{ + Name: "n2", + ID: &specv3.NodeID{ + Name: "id", + Type: specv3.ValueTypeString, + Index: 1, + }, + }, + }, + }, + } + + importers, err := s.BuildImporters("graphName", nil) + Expect(err).NotTo(HaveOccurred()) + Expect(importers).To(HaveLen(3)) + }) + }) +}) + +var _ = Describe("Sources", func() { + DescribeTable(".OptimizePath", + func(configPath string, files, expectFiles []string) { + var sources Sources + if files != nil { + sources = make(Sources, len(files)) + } + for i, file := range files { + sources[i].SourceConfig.Local = &source.LocalConfig{ + Path: file, + } + } + Expect(sources.OptimizePath(configPath)).NotTo(HaveOccurred()) + var sourcePaths []string + if sources != nil { + sourcePaths = make([]string, len(sources)) + for i := range sources { + sourcePaths[i] = sources[i].SourceConfig.Local.Path + } + } + Expect(sourcePaths).To(Equal(expectFiles)) + }, + EntryDescription("%[1]s : %[2]v => %[3]v"), + + Entry(nil, "f.yaml", nil, nil), + Entry(nil, "./f.yaml", []string{"1.csv"}, []string{"1.csv"}), + Entry(nil, "f.yaml", []string{"1.csv", "2.csv"}, []string{"1.csv", "2.csv"}), + Entry(nil, "./f.yaml", []string{"d10/1.csv", "./d20/2.csv"}, []string{"d10/1.csv", "d20/2.csv"}), + + Entry(nil, "./d1/f.yaml", nil, nil), + Entry(nil, "d1/f.yaml", []string{"1.csv"}, []string{"d1/1.csv"}), + Entry(nil, "./d1/f.yaml", []string{"1.csv", "2.csv"}, []string{"d1/1.csv", "d1/2.csv"}), + Entry(nil, "d1/f.yaml", []string{"d10/1.csv", "./d20/2.csv"}, []string{"d1/d10/1.csv", "d1/d20/2.csv"}), + + Entry(nil, "./d1/f.yaml", nil, nil), + Entry(nil, "d1/f.yaml", []string{"/1.csv"}, []string{"/1.csv"}), + Entry(nil, "./d1/f.yaml", []string{"/1.csv", "/2.csv"}, []string{"/1.csv", "/2.csv"}), + Entry(nil, "d1/f.yaml", []string{"/d10/1.csv", "/d20/2.csv"}, []string{"/d10/1.csv", 
"/d20/2.csv"}), + ) + + Describe(".OptimizePathWildCard", func() { + var ( + wd string + ) + BeforeEach(func() { + var err error + wd, err = os.Getwd() + Expect(err).NotTo(HaveOccurred()) + }) + + It("nil", func() { + var sources Sources + Expect(sources.OptimizePathWildCard()).NotTo(HaveOccurred()) + }) + + It("rel:WildCard:yes", func() { + sources := make(Sources, 1) + sources[0].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file*"), + } + Expect(sources.OptimizePathWildCard()).NotTo(HaveOccurred()) + if Expect(sources).To(HaveLen(3)) { + Expect(sources[0].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file10"))) + Expect(sources[1].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file11"))) + Expect(sources[2].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file20"))) + } + }) + + It("rel:WildCard:no", func() { + sources := make(Sources, 3) + sources[0].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file10"), + } + sources[1].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file11"), + } + sources[2].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file20"), + } + + Expect(sources.OptimizePathWildCard()).NotTo(HaveOccurred()) + if Expect(sources).To(HaveLen(3)) { + Expect(sources[0].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file10"))) + Expect(sources[1].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file11"))) + Expect(sources[2].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file20"))) + } + }) + + It("abs:WildCard:yes", func() { + sources := make(Sources, 1) + sources[0].SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join(wd, "testdata", "file*"), + } + Expect(sources.OptimizePathWildCard()).NotTo(HaveOccurred()) + if Expect(sources).To(HaveLen(3)) { + Expect(sources[0].SourceConfig.Local.Path).To(Equal(filepath.Join(wd, "testdata", "file10"))) + Expect(sources[1].SourceConfig.Local.Path).To(Equal(filepath.Join(wd, "testdata", "file11"))) + Expect(sources[2].SourceConfig.Local.Path).To(Equal(filepath.Join(wd, "testdata", "file20"))) + } + }) + + It("abs:WildCard:no", func() { + sources := make(Sources, 3) + sources[0].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join(wd, "testdata", "file10"), + } + sources[1].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join(wd, "testdata", "file11"), + } + sources[2].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join(wd, "testdata", "file20"), + } + + Expect(sources.OptimizePathWildCard()).NotTo(HaveOccurred()) + if Expect(sources).To(HaveLen(3)) { + Expect(sources[0].SourceConfig.Local.Path).To(Equal(filepath.Join(wd, "testdata", "file10"))) + Expect(sources[1].SourceConfig.Local.Path).To(Equal(filepath.Join(wd, "testdata", "file11"))) + Expect(sources[2].SourceConfig.Local.Path).To(Equal(filepath.Join(wd, "testdata", "file20"))) + } + }) + + It("rel:WildCard:yes:s3", func() { + sources := make(Sources, 2) + sources[0].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file*"), + } + sources[1].Source.SourceConfig.S3 = &source.S3Config{ + Bucket: "bucket", + } + Expect(sources.OptimizePathWildCard()).NotTo(HaveOccurred()) + if Expect(sources).To(HaveLen(4)) { + Expect(sources[0].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file10"))) + 
Expect(sources[1].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file11"))) + Expect(sources[2].SourceConfig.Local.Path).To(Equal(filepath.Join("testdata", "file20"))) + Expect(sources[3].SourceConfig.S3.Bucket).To(Equal("bucket")) + } + }) + + It("failed", func() { + sources := make(Sources, 2) + sources[0].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file*"), + } + sources[1].SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "[a-b"), + } + Expect(sources.OptimizePathWildCard()).To(HaveOccurred()) + + sources = make(Sources, 2) + sources[0].Source.SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "file*"), + } + sources[1].SourceConfig.Local = &source.LocalConfig{ + Path: filepath.Join("testdata", "not-exists"), + } + Expect(sources.OptimizePathWildCard()).To(HaveOccurred()) + }) + }) +}) diff --git a/pkg/config/v3/testdata/file10 b/pkg/config/v3/testdata/file10 new file mode 100644 index 00000000..e69de29b diff --git a/pkg/config/v3/testdata/file11 b/pkg/config/v3/testdata/file11 new file mode 100644 index 00000000..e69de29b diff --git a/pkg/config/v3/testdata/file20 b/pkg/config/v3/testdata/file20 new file mode 100644 index 00000000..e69de29b diff --git a/pkg/csv/csv_test.go b/pkg/csv/csv_test.go deleted file mode 100644 index e2cde3ec..00000000 --- a/pkg/csv/csv_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package csv - -import ( - "encoding/csv" - "os" - "testing" -) - -func TestCsvWriter(t *testing.T) { - file, err := os.CreateTemp("", "test") - if err != nil { - t.Fatal(err) - } - defer func() { - file.Close() - os.Remove(file.Name()) - }() - - w := csv.NewWriter(file) - - if err := w.Write([]string{"hash(\"hello\")", "234"}); err != nil { - t.Fatal(err) - } - w.Flush() - if w.Error() != nil { - t.Fatal(w.Error()) - } -} diff --git a/pkg/csv/errwriter.go b/pkg/csv/errwriter.go deleted file mode 100644 index a861c491..00000000 --- a/pkg/csv/errwriter.go +++ /dev/null @@ -1,60 +0,0 @@ -package csv - -import ( - "encoding/csv" - "os" - "strings" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type ErrWriter struct { - writer *csv.Writer - csvConfig *config.CSVConfig - runnerLogger *logger.RunnerLogger -} - -func NewErrDataWriter(config *config.CSVConfig, runnerLogger *logger.RunnerLogger) *ErrWriter { - return &ErrWriter{ - csvConfig: config, - runnerLogger: runnerLogger, - } -} - -func (w *ErrWriter) Error() error { - return w.writer.Error() -} - -func (w *ErrWriter) Init(f *os.File) { - w.writer = csv.NewWriter(f) -} - -func (w *ErrWriter) Write(data []base.Data) { - if len(data) == 0 { - logger.Log.Info("Empty error data") - } - for _, d := range data { - if *w.csvConfig.WithLabel { - var record []string - switch d.Type { - case base.INSERT: - record = append(record, "+") - case base.DELETE: - record = append(record, "-") - default: - logger.Log.Errorf("Error data type: %s, data: %s", d.Type, strings.Join(d.Record, ",")) - continue - } - record = append(record, d.Record...) 
- w.writer.Write(record) - } else { - w.writer.Write(d.Record) - } - } -} - -func (w *ErrWriter) Flush() { - w.writer.Flush() -} diff --git a/pkg/csv/reader.go b/pkg/csv/reader.go deleted file mode 100644 index b31d80ab..00000000 --- a/pkg/csv/reader.go +++ /dev/null @@ -1,105 +0,0 @@ -package csv - -import ( - "bufio" - "encoding/csv" - "errors" - "fmt" - "io" - "os" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type CSVReader struct { - CSVConfig *config.CSVConfig - reader *csv.Reader - lineNum uint64 - rr *recordReader - br *bufio.Reader - totalBytes int64 - initComplete bool - runnerLogger *logger.RunnerLogger -} - -type recordReader struct { - io.Reader - remainingBytes int -} - -func (r *recordReader) Read(p []byte) (n int, err error) { - n, err = r.Reader.Read(p) - r.remainingBytes += n - return -} - -func (r *CSVReader) InitReader(file *os.File, runnerLogger *logger.RunnerLogger) { - r.runnerLogger = runnerLogger - r.rr = &recordReader{ - Reader: file, - } - r.br = bufio.NewReader(r.rr) - r.reader = csv.NewReader(r.br) - if r.CSVConfig.Delimiter != nil { - d := []rune(*r.CSVConfig.Delimiter) - if len(d) > 0 { - r.reader.Comma = d[0] - logger.Log.Infof("The delimiter of %s is %#U", file.Name(), r.reader.Comma) - } - } - if r.CSVConfig.LazyQuotes != nil { - r.reader.LazyQuotes = *r.CSVConfig.LazyQuotes - } - stat, err := file.Stat() - if err != nil { - logger.Log.Infof("The stat of %s is wrong, %s", file.Name(), err) - } - r.totalBytes = stat.Size() - defer func() { - r.initComplete = true - }() -} - -func (r *CSVReader) ReadLine() (base.Data, error) { - line, err := r.reader.Read() - - if err != nil { - return base.Data{}, err - } - - r.lineNum++ - n := r.rr.remainingBytes - r.br.Buffered() - r.rr.remainingBytes -= n - - if *r.CSVConfig.WithHeader && r.lineNum == 1 { - if *r.CSVConfig.WithLabel { - return base.HeaderData(line[1:], n), nil - } else { - return base.HeaderData(line, n), nil - } - } - - if *r.CSVConfig.WithLabel { - switch line[0] { - case "+": - return base.InsertData(line[1:], n), nil - case "-": - return base.DeleteData(line[1:], n), nil - default: - return base.Data{ - Bytes: n, - }, fmt.Errorf("Invalid label: %s", line[0]) - } - } else { - return base.InsertData(line, n), nil - } -} - -func (r *CSVReader) TotalBytes() (int64, error) { - if r.initComplete { - return r.totalBytes, nil - } - return 0, errors.New("init not complete") -} diff --git a/pkg/errhandler/datawriter.go b/pkg/errhandler/datawriter.go deleted file mode 100644 index 1a6201a6..00000000 --- a/pkg/errhandler/datawriter.go +++ /dev/null @@ -1,14 +0,0 @@ -package errhandler - -import ( - "os" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" -) - -type DataWriter interface { - Init(*os.File) - Write([]base.Data) - Flush() - Error() error -} diff --git a/pkg/errhandler/handler.go b/pkg/errhandler/handler.go deleted file mode 100644 index 5ad22acd..00000000 --- a/pkg/errhandler/handler.go +++ /dev/null @@ -1,80 +0,0 @@ -package errhandler - -import ( - "fmt" - "os" - "strings" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/csv" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type Handler struct { - statsCh chan<- base.Stats -} - -func New(statsCh chan<- base.Stats) *Handler { - h := Handler{ - statsCh: statsCh, - } - - return &h -} - -func (w 
*Handler) Init(file *config.File, concurrency int, cleanup bool, runnerLogger *logger.RunnerLogger) (chan base.ErrData, error) { - var dataWriter DataWriter - switch strings.ToLower(*file.Type) { - case "csv": - dataWriter = csv.NewErrDataWriter(file.CSV, runnerLogger) - default: - return nil, fmt.Errorf("Wrong file type: %s", *file.Type) - } - - dataFile := base.MustCreateFile(*file.FailDataPath) - errCh := make(chan base.ErrData) - - go func() { - defer func() { - if err := dataFile.Close(); err != nil { - logger.Log.Errorf("Fail to close opened error data file: %s", *file.FailDataPath) - } - if cleanup { - if err := os.Remove(*file.FailDataPath); err != nil { - logger.Log.Errorf("Fail to remove error data file: %s", *file.FailDataPath) - } else { - logger.Log.Infof("Error data file has been removed: %s", *file.FailDataPath) - } - } - }() - defer close(errCh) - dataWriter.Init(dataFile) - - for { - rawErr := <-errCh - if rawErr.Error == nil { - concurrency-- - if concurrency == 0 { - break - } - } else { - dataWriter.Write(rawErr.Data) - logger.Log.Error(rawErr.Error.Error()) - var importedBytes int64 - for _, d := range rawErr.Data { - importedBytes += int64(d.Bytes) - } - w.statsCh <- base.NewFailureStats(len(rawErr.Data), importedBytes) - } - } - - dataWriter.Flush() - if dataWriter.Error() != nil { - logger.Log.Error(dataWriter.Error()) - } - w.statsCh <- base.NewFileDoneStats(*file.Path) - }() - - return errCh, nil -} diff --git a/pkg/errors/errors.go b/pkg/errors/errors.go new file mode 100644 index 00000000..dcc5cb81 --- /dev/null +++ b/pkg/errors/errors.go @@ -0,0 +1,26 @@ +package errors + +import ( + stderrors "errors" +) + +var ( + ErrUnsupportedClientVersion = stderrors.New("unsupported client version") + ErrNoAddresses = stderrors.New("no addresses") + ErrInvalidAddress = stderrors.New("invalid address") + ErrInvalidIndex = stderrors.New("invalid index") + ErrNoSpaceName = stderrors.New("no space name") + ErrNoGraphName = stderrors.New("no graph name") + ErrNoNodeName = stderrors.New("no node name") + ErrNoNodeID = stderrors.New("no node id") + ErrNoEdgeSrc = stderrors.New("no edge src") + ErrNoEdgeDst = stderrors.New("no edge dst") + ErrNoEdgeName = stderrors.New("no edge name") + ErrNoNodeIDName = stderrors.New("no node id name") + ErrNoPropName = stderrors.New("no prop name") + ErrUnsupportedValueType = stderrors.New("unsupported value type") + ErrNoRecord = stderrors.New("no record") + ErrNoIndicesOrConcatItems = stderrors.New("no indices or concat items") + ErrUnsupportedConcatItemType = stderrors.New("unsupported concat item type") + ErrUnsupportedFunction = stderrors.New("unsupported function") +) diff --git a/pkg/errors/errors_suite_test.go b/pkg/errors/errors_suite_test.go new file mode 100644 index 00000000..34098a07 --- /dev/null +++ b/pkg/errors/errors_suite_test.go @@ -0,0 +1,13 @@ +package errors + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func TestErrors(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg errors Suite") +} diff --git a/pkg/errors/import.go b/pkg/errors/import.go new file mode 100644 index 00000000..40b87787 --- /dev/null +++ b/pkg/errors/import.go @@ -0,0 +1,213 @@ +package errors + +import ( + "errors" + "fmt" + "strings" +) + +const ( + fieldMessages = "messages" + fieldGraphName = "graph" + fieldEdgeName = "edge" + fieldNodeName = "node" + fieldNodeIDName = "nodeID" + fieldPropName = "prop" + fieldRecord = "record" + fieldStatement = "statement" +) + +var _ error = (*ImportError)(nil) + +type ( + ImportError struct { + Err error + Messages []string + fields map[string]any + } +) + +func NewImportError(err error, formatWithArgs ...any) *ImportError { + e := &ImportError{ + Err: err, + fields: map[string]any{}, + } + return e.AppendMessage(formatWithArgs...) +} + +func AsImportError(err error) (*ImportError, bool) { + if e := new(ImportError); errors.As(err, &e) { + return e, true + } + return nil, false +} + +func AsOrNewImportError(err error, formatWithArgs ...any) *ImportError { + e, ok := AsImportError(err) + if ok { + return e.AppendMessage(formatWithArgs...) + } + return NewImportError(err, formatWithArgs...) +} + +func (e *ImportError) AppendMessage(formatWithArgs ...any) *ImportError { + if len(formatWithArgs) > 0 { + var message string + if format, ok := formatWithArgs[0].(string); ok { + message = fmt.Sprintf(format, formatWithArgs[1:]...) + } else { + message = fmt.Sprint(formatWithArgs[0]) + } + if message != "" { + e.Messages = append(e.Messages, message) + } + } + return e +} + +func (e *ImportError) SetGraphName(graphName string) *ImportError { + return e.withField(fieldGraphName, graphName) +} + +func (e *ImportError) GraphName() string { + return e.getFieldString(fieldGraphName) +} + +func (e *ImportError) SetNodeName(nodeName string) *ImportError { + return e.withField(fieldNodeName, nodeName) +} + +func (e *ImportError) NodeName() string { + return e.getFieldString(fieldNodeName) +} + +func (e *ImportError) SetEdgeName(edgeName string) *ImportError { + return e.withField(fieldEdgeName, edgeName) +} + +func (e *ImportError) EdgeName() string { + return e.getFieldString(fieldEdgeName) +} + +func (e *ImportError) SetNodeIDName(nodeIDName string) *ImportError { + return e.withField(fieldNodeIDName, nodeIDName) +} + +func (e *ImportError) NodeIDName() string { + return e.getFieldString(fieldNodeIDName) +} + +func (e *ImportError) SetPropName(propName string) *ImportError { + return e.withField(fieldPropName, propName) +} + +func (e *ImportError) PropName() string { + return e.getFieldString(fieldPropName) +} + +func (e *ImportError) SetRecord(record []string) *ImportError { + return e.withField(fieldRecord, record) +} + +func (e *ImportError) Record() []string { + return e.getFieldStringSlice(fieldRecord) +} + +func (e *ImportError) SetStatement(statement string) *ImportError { + return e.withField(fieldStatement, statement) +} + +func (e *ImportError) Statement() string { + return e.getFieldString(fieldStatement) +} + +func (e *ImportError) Fields() map[string]any { + m := make(map[string]any, len(e.fields)+1) + for k, v := range e.fields { + m[k] = v + } + if len(e.Messages) > 0 { + m[fieldMessages] = e.Messages + } + return m +} + +func (e *ImportError) withField(key string, value any) *ImportError { + switch val := value.(type) { + case string: + if val == "" { + return e + } + case []string: + if len(val) == 0 { + return e + } + } + e.fields[key] 
= value
+	return e
+}
+
+func (e *ImportError) getFieldString(key string) string {
+	v, ok := e.fields[key]
+	if !ok {
+		return ""
+	}
+	vv, ok := v.(string)
+	if !ok {
+		return ""
+	}
+	return vv
+}
+
+func (e *ImportError) getFieldStringSlice(key string) []string {
+	v, ok := e.fields[key]
+	if !ok {
+		return nil
+	}
+	vv, ok := v.([]string)
+	if !ok {
+		return nil
+	}
+	return vv
+}
+
+func (e *ImportError) Error() string {
+	var fields []string
+	if graphName := e.GraphName(); graphName != "" {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldGraphName, graphName))
+	}
+	if nodeName := e.NodeName(); nodeName != "" {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldNodeName, nodeName))
+	}
+	if edgeName := e.EdgeName(); edgeName != "" {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldEdgeName, edgeName))
+	}
+	if nodeIDName := e.NodeIDName(); nodeIDName != "" {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldNodeIDName, nodeIDName))
+	}
+	if propName := e.PropName(); propName != "" {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldPropName, propName))
+	}
+	if record := e.Record(); len(record) > 0 {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldRecord, record))
+	}
+	if statement := e.Statement(); statement != "" {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldStatement, statement))
+	}
+	if len(e.Messages) > 0 {
+		fields = append(fields, fmt.Sprintf("%s(%s)", fieldMessages, strings.Join(e.Messages, ", ")))
+	}
+	if e.Err != nil {
+		fields = append(fields, e.Err.Error())
+	}
+
+	return strings.Join(fields, ": ")
+}
+
+func (e *ImportError) Cause() error {
+	return e.Err
+}
+
+func (e *ImportError) Unwrap() error {
+	return e.Err
+}
diff --git a/pkg/errors/import_test.go b/pkg/errors/import_test.go
new file mode 100644
index 00000000..e2e0f5e4
--- /dev/null
+++ b/pkg/errors/import_test.go
@@ -0,0 +1,190 @@
+package errors
+
+import (
+	stderrors "errors"
+
+	. "github.com/onsi/ginkgo/v2"
+	. 
"github.com/onsi/gomega" +) + +var _ = Describe("ImportError", func() { + Describe("NewImportError", func() { + It("nil", func() { + importError := NewImportError(nil) + Expect(importError).NotTo(BeNil()) + Expect(importError.Cause()).To(BeNil()) + Expect(importError.Unwrap()).To(BeNil()) + Expect(importError.Messages).To(BeNil()) + Expect(importError.fields).To(BeEmpty()) + }) + + It("err", func() { + err := stderrors.New("test error") + importError := NewImportError(err) + Expect(importError).NotTo(BeNil()) + Expect(importError.Cause()).To(Equal(err)) + Expect(importError.Unwrap()).To(Equal(err)) + Expect(importError.Messages).To(BeNil()) + Expect(importError.fields).To(BeEmpty()) + }) + + It("err with message empty", func() { + err := stderrors.New("test error") + importError := NewImportError(err, "") + Expect(importError).NotTo(BeNil()) + Expect(importError.Cause()).To(Equal(err)) + Expect(importError.Unwrap()).To(Equal(err)) + Expect(importError.Messages).To(BeNil()) + Expect(importError.fields).To(BeEmpty()) + }) + + It("err with message", func() { + err := stderrors.New("test error") + importError := NewImportError(err, "test message") + Expect(importError).NotTo(BeNil()) + Expect(importError.Cause()).To(Equal(err)) + Expect(importError.Unwrap()).To(Equal(err)) + Expect(importError.Messages).To(Equal([]string{"test message"})) + Expect(importError.fields).To(BeEmpty()) + }) + + It("err with message and format", func() { + err := stderrors.New("test error") + importError := NewImportError(err, "test message %d", 1) + Expect(importError).NotTo(BeNil()) + Expect(importError.Cause()).To(Equal(err)) + Expect(importError.Unwrap()).To(Equal(err)) + Expect(importError.Messages).To(Equal([]string{"test message 1"})) + Expect(importError.fields).To(BeEmpty()) + }) + + It("err with message not string", func() { + err := stderrors.New("test error") + importError := NewImportError(err, 1) + Expect(importError).NotTo(BeNil()) + Expect(importError.Cause()).To(Equal(err)) + Expect(importError.Unwrap()).To(Equal(err)) + Expect(importError.Messages).To(Equal([]string{"1"})) + Expect(importError.fields).To(BeEmpty()) + }) + }) + + Describe("AsImportError", func() { + It("nil", func() { + err, ok := AsImportError(nil) + Expect(ok).To(BeFalse()) + Expect(err).To(BeNil()) + }) + + It("no import error", func() { + err, ok := AsImportError(stderrors.New("test error")) + Expect(ok).To(BeFalse()) + Expect(err).To(BeNil()) + }) + + It("import error", func() { + importError, ok := AsImportError(NewImportError(nil, "test message")) + Expect(ok).To(BeTrue()) + Expect(importError).NotTo(BeNil()) + Expect(importError.Messages).To(Equal([]string{"test message"})) + }) + }) + + Describe("AsOrNewImportError", func() { + It("nil", func() { + importError := AsOrNewImportError(nil) + Expect(importError).NotTo(BeNil()) + }) + + It("no import error", func() { + importError := AsOrNewImportError(stderrors.New("test error")) + Expect(importError).NotTo(BeNil()) + }) + + It("import error", func() { + importError := AsOrNewImportError( + NewImportError(nil, "test message"), + "test message %d", + 1, + ) + Expect(importError).NotTo(BeNil()) + Expect(importError.Messages).To(Equal([]string{"test message", "test message 1"})) + }) + }) + + It("Fields", func() { + importError := AsOrNewImportError(stderrors.New("test error")) + Expect(importError.Fields()).To(BeEmpty()) + + importError.AppendMessage("") + Expect(importError.Messages).To(BeEmpty()) + + importError.SetGraphName("") + Expect(importError.GraphName()).To(BeEmpty()) + 
+		importError.SetNodeName("")
+		Expect(importError.NodeName()).To(BeEmpty())
+
+		importError.SetEdgeName("")
+		Expect(importError.EdgeName()).To(BeEmpty())
+
+		importError.SetNodeIDName("")
+		Expect(importError.NodeIDName()).To(BeEmpty())
+
+		importError.SetPropName("")
+		Expect(importError.PropName()).To(BeEmpty())
+
+		importError.SetRecord(nil)
+		Expect(importError.Record()).To(BeEmpty())
+
+		importError.SetStatement("")
+		Expect(importError.Statement()).To(BeEmpty())
+
+		Expect(importError.Fields()).To(BeEmpty())
+
+		importError.AppendMessage("test message")
+		importError.AppendMessage("test message %d", 1)
+		Expect(importError.Messages).To(Equal([]string{"test message", "test message 1"}))
+
+		importError.SetGraphName("graphName")
+		Expect(importError.GraphName()).To(Equal("graphName"))
+
+		importError.SetNodeName("nodeName")
+		Expect(importError.NodeName()).To(Equal("nodeName"))
+
+		importError.SetEdgeName("edgeName")
+		Expect(importError.EdgeName()).To(Equal("edgeName"))
+
+		importError.SetNodeIDName("nodeIDName")
+		Expect(importError.NodeIDName()).To(Equal("nodeIDName"))
+
+		importError.SetPropName("propName")
+		Expect(importError.PropName()).To(Equal("propName"))
+
+		importError.SetRecord([]string{"record1", "record2"})
+		Expect(importError.Record()).To(Equal([]string{"record1", "record2"}))
+
+		importError.SetStatement("test statement")
+		Expect(importError.Statement()).To(Equal("test statement"))
+
+		Expect(importError.Fields()).To(Equal(map[string]any{
+			"messages":  []string{"test message", "test message 1"},
+			"graph":     "graphName",
+			"node":      "nodeName",
+			"edge":      "edgeName",
+			"nodeID":    "nodeIDName",
+			"prop":      "propName",
+			"record":    []string{"record1", "record2"},
+			"statement": "test statement",
+		}))
+		Expect(importError.Error()).To(Equal("graph(graphName): node(nodeName): edge(edgeName): nodeID(nodeIDName): prop(propName): record([record1 record2]): statement(test statement): messages(test message, test message 1): test error"))
+	})
+
+	It("withField", func() {
+		importError := AsOrNewImportError(stderrors.New("test error"))
+		importError.withField("f1", "str")
+		Expect(importError.getFieldStringSlice("f1")).To(BeEmpty())
+		importError.withField("f2", []string{"str"})
+		Expect(importError.getFieldString("f2")).To(BeEmpty())
+	})
+})
diff --git a/pkg/errors/wrapper.go b/pkg/errors/wrapper.go
deleted file mode 100644
index 16517e0b..00000000
--- a/pkg/errors/wrapper.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package errors
-
-import "fmt"
-
-const (
-	UnknownError              = -1
-	ConfigError               = 1
-	InvalidConfigPathOrFormat = 2
-	DownloadError             = 100
-	NebulaError               = 200
-	NotCompleteError          = 201
-)
-
-type ImporterError struct {
-	ErrCode int
-	ErrMsg  error
-}
-
-func (e ImporterError) Error() string {
-	return fmt.Sprintf("error code: %d, message: %s", e.ErrCode, e.ErrMsg.Error())
-}
-
-func Wrap(code int, err error) ImporterError {
-	return ImporterError{
-		ErrCode: code,
-		ErrMsg:  err,
-	}
-}
diff --git a/pkg/importer/importer.go b/pkg/importer/importer.go
new file mode 100644
index 00000000..4d5e57d0
--- /dev/null
+++ b/pkg/importer/importer.go
@@ -0,0 +1,110 @@
+//go:generate mockgen -source=importer.go -destination importer_mock.go -package importer Importer
+package importer
+
+import (
+	"time"
+
+	"github.com/vesoft-inc/nebula-importer/v4/pkg/client"
+	"github.com/vesoft-inc/nebula-importer/v4/pkg/errors"
+	"github.com/vesoft-inc/nebula-importer/v4/pkg/spec"
+)
+
+type (
+	Importer interface {
+		Wait()
+		Import(records ...spec.Record) (*ImportResp, error)
+		Done()
+	}
+
+	ImportResp 
struct { + Latency time.Duration + RespTime time.Duration + } + + ImportResult struct { + Resp *ImportResp + Err error + } + + Option func(*defaultImporter) + + defaultImporter struct { + builder spec.StatementBuilder + pool client.Pool + fnWait func() + fnDone func() + } +) + +func New(builder spec.StatementBuilder, pool client.Pool, opts ...Option) Importer { + options := make([]Option, 0, 2+len(opts)) + options = append(options, WithStatementBuilder(builder), WithClientPool(pool)) + options = append(options, opts...) + return NewWithOpts(options...) +} + +func NewWithOpts(opts ...Option) Importer { + i := &defaultImporter{} + for _, opt := range opts { + opt(i) + } + return i +} + +func WithStatementBuilder(builder spec.StatementBuilder) Option { + return func(i *defaultImporter) { + i.builder = builder + } +} + +func WithClientPool(p client.Pool) Option { + return func(i *defaultImporter) { + i.pool = p + } +} + +func WithWaitFunc(fn func()) Option { + return func(i *defaultImporter) { + i.fnWait = fn + } +} + +func WithDoneFunc(fn func()) Option { + return func(i *defaultImporter) { + i.fnDone = fn + } +} + +func (i *defaultImporter) Wait() { + if i.fnWait != nil { + i.fnWait() + } +} + +func (i *defaultImporter) Import(records ...spec.Record) (*ImportResp, error) { + statement, err := i.builder.Build(records...) + if err != nil { + return nil, err + } + + resp, err := i.pool.Execute(statement) + if err != nil { + return nil, errors.NewImportError(err). + SetStatement(statement) + } + if !resp.IsSucceed() { + return nil, errors.NewImportError(err, "the execute error is %s ", resp.GetError()). + SetStatement(statement) + } + + return &ImportResp{ + RespTime: resp.GetRespTime(), + Latency: resp.GetLatency(), + }, nil +} + +func (i *defaultImporter) Done() { + if i.fnDone != nil { + i.fnDone() + } +} diff --git a/pkg/importer/importer_mock.go b/pkg/importer/importer_mock.go new file mode 100644 index 00000000..9a6d41cc --- /dev/null +++ b/pkg/importer/importer_mock.go @@ -0,0 +1,78 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: importer.go + +// Package importer is a generated GoMock package. +package importer + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" + spec "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" +) + +// MockImporter is a mock of Importer interface. +type MockImporter struct { + ctrl *gomock.Controller + recorder *MockImporterMockRecorder +} + +// MockImporterMockRecorder is the mock recorder for MockImporter. +type MockImporterMockRecorder struct { + mock *MockImporter +} + +// NewMockImporter creates a new mock instance. +func NewMockImporter(ctrl *gomock.Controller) *MockImporter { + mock := &MockImporter{ctrl: ctrl} + mock.recorder = &MockImporterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockImporter) EXPECT() *MockImporterMockRecorder { + return m.recorder +} + +// Done mocks base method. +func (m *MockImporter) Done() { + m.ctrl.T.Helper() + m.ctrl.Call(m, "Done") +} + +// Done indicates an expected call of Done. +func (mr *MockImporterMockRecorder) Done() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Done", reflect.TypeOf((*MockImporter)(nil).Done)) +} + +// Import mocks base method. 
+func (m *MockImporter) Import(records ...spec.Record) (*ImportResp, error) { + m.ctrl.T.Helper() + varargs := []interface{}{} + for _, a := range records { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Import", varargs...) + ret0, _ := ret[0].(*ImportResp) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Import indicates an expected call of Import. +func (mr *MockImporterMockRecorder) Import(records ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*MockImporter)(nil).Import), records...) +} + +// Wait mocks base method. +func (m *MockImporter) Wait() { + m.ctrl.T.Helper() + m.ctrl.Call(m, "Wait") +} + +// Wait indicates an expected call of Wait. +func (mr *MockImporterMockRecorder) Wait() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Wait", reflect.TypeOf((*MockImporter)(nil).Wait)) +} diff --git a/pkg/importer/importer_suite_test.go b/pkg/importer/importer_suite_test.go new file mode 100644 index 00000000..9639bdf7 --- /dev/null +++ b/pkg/importer/importer_suite_test.go @@ -0,0 +1,13 @@ +package importer + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestImporter(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg importer Suite") +} diff --git a/pkg/importer/importer_test.go b/pkg/importer/importer_test.go new file mode 100644 index 00000000..eae20a0e --- /dev/null +++ b/pkg/importer/importer_test.go @@ -0,0 +1,138 @@ +package importer + +import ( + stderrors "errors" + "sync" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" + specbase "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/base" + + "github.com/golang/mock/gomock" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Importer", func() { + var ( + ctrl *gomock.Controller + mockClientPool *client.MockPool + mockResponse *client.MockResponse + mockBuilder *specbase.MockStatementBuilder + ) + BeforeEach(func() { + ctrl = gomock.NewController(GinkgoT()) + mockClientPool = client.NewMockPool(ctrl) + mockResponse = client.NewMockResponse(ctrl) + mockBuilder = specbase.NewMockStatementBuilder(ctrl) + }) + + AfterEach(func() { + ctrl.Finish() + }) + + Describe("New", func() { + It("build failed", func() { + mockBuilder.EXPECT().Build(gomock.Any()).Return("", errors.ErrNoRecord) + + i := New(mockBuilder, mockClientPool) + resp, err := i.Import(spec.Record{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(resp).To(BeNil()) + }) + + It("execute failed", func() { + mockBuilder.EXPECT().Build(gomock.Any()).Return("statement", nil) + mockClientPool.EXPECT().Execute(gomock.Any()).Return(nil, stderrors.New("test error")) + + i := New(mockBuilder, mockClientPool) + resp, err := i.Import(spec.Record{"id"}) + Expect(err).To(HaveOccurred()) + importError, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(importError.Statement()).NotTo(BeEmpty()) + Expect(resp).To(BeNil()) + }) + + It("execute IsSucceed false", func() { + mockBuilder.EXPECT().Build(gomock.Any()).Return("statement", nil) + mockClientPool.EXPECT().Execute(gomock.Any()).Times(1).Return(mockResponse, nil) + mockResponse.EXPECT().IsSucceed().Times(1).Return(false) + mockResponse.EXPECT().GetError().Times(1).Return(stderrors.New("status failed")) + + i := New(mockBuilder, mockClientPool) + resp, err := i.Import(spec.Record{"id"}) + Expect(err).To(HaveOccurred()) + importError, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(importError.Messages).To(ContainElement(ContainSubstring("status failed"))) + Expect(importError.Statement()).NotTo(BeEmpty()) + Expect(resp).To(BeNil()) + }) + + It("execute successfully", func() { + mockBuilder.EXPECT().Build(gomock.Any()).Times(1).Return("statement", nil) + mockClientPool.EXPECT().Execute(gomock.Any()).Times(1).Return(mockResponse, nil) + mockResponse.EXPECT().IsSucceed().Times(1).Return(true) + mockResponse.EXPECT().GetLatency().Times(1).Return(time.Microsecond * 10) + mockResponse.EXPECT().GetRespTime().AnyTimes().Return(time.Microsecond * 12) + + i := New(mockBuilder, mockClientPool) + i.Wait() + defer i.Done() + resp, err := i.Import(spec.Record{"id"}) + Expect(err).NotTo(HaveOccurred()) + Expect(resp).NotTo(BeNil()) + Expect(resp.Latency).To(Equal(time.Microsecond * time.Duration(10))) + Expect(resp.RespTime).To(Equal(time.Microsecond * time.Duration(12))) + }) + + It("execute successfully with Wait and Done", func() { + mockBuilder.EXPECT().Build(gomock.Any()).Times(2).Return("statement", nil) + mockClientPool.EXPECT().Execute(gomock.Any()).Times(2).Return(mockResponse, nil) + mockResponse.EXPECT().IsSucceed().Times(2).Return(true) + mockResponse.EXPECT().GetLatency().Times(2).Return(time.Microsecond * 10) + mockResponse.EXPECT().GetRespTime().AnyTimes().Return(time.Microsecond * 12) + + var ( + wg sync.WaitGroup + isImporter1Done = false + ) + wg.Add(1) + // i2 need to wait i1 + i1 := New(mockBuilder, mockClientPool, + WithDoneFunc(func() { + time.Sleep(time.Millisecond) + isImporter1Done = true + wg.Done() + }), + ) + i2 := New(mockBuilder, mockClientPool, + WithWaitFunc(func() { + wg.Wait() + Expect(isImporter1Done).To(BeTrue()) + }), + ) + + go func() { + i1.Wait() + defer 
i1.Done() + resp, err := i1.Import(spec.Record{"id"}) + Expect(err).NotTo(HaveOccurred()) + Expect(resp).NotTo(BeNil()) + Expect(resp.Latency).To(Equal(time.Microsecond * time.Duration(10))) + }() + + i2.Wait() + defer i2.Done() + resp, err := i2.Import(spec.Record{"id"}) + Expect(err).NotTo(HaveOccurred()) + Expect(resp).NotTo(BeNil()) + Expect(resp.Latency).To(Equal(time.Microsecond * time.Duration(10))) + Expect(resp.RespTime).To(Equal(time.Microsecond * time.Duration(12))) + }) + }) +}) diff --git a/pkg/logger/adapter.go b/pkg/logger/adapter.go deleted file mode 100644 index b614e024..00000000 --- a/pkg/logger/adapter.go +++ /dev/null @@ -1,31 +0,0 @@ -package logger - -import ( - "fmt" -) - -type NebulaLogger struct { - runnerLogger *RunnerLogger -} - -func NewNebulaLogger(r *RunnerLogger) *NebulaLogger { - n := new(NebulaLogger) - n.runnerLogger = r - return n -} - -func (n NebulaLogger) Info(msg string) { - n.runnerLogger.infoWithSkip(2, fmt.Sprintf("[nebula-go] %s", msg)) -} - -func (n NebulaLogger) Warn(msg string) { - n.runnerLogger.warnWithSkip(2, fmt.Sprintf("[nebula-go] %s", msg)) -} - -func (n NebulaLogger) Error(msg string) { - n.runnerLogger.errorWithSkip(2, fmt.Sprintf("[nebula-go] %s", msg)) -} - -func (n NebulaLogger) Fatal(msg string) { - n.runnerLogger.fatalWithSkip(2, fmt.Sprintf("[nebula-go] %s", msg)) -} diff --git a/pkg/logger/field.go b/pkg/logger/field.go new file mode 100644 index 00000000..77ed349c --- /dev/null +++ b/pkg/logger/field.go @@ -0,0 +1,21 @@ +package logger + +type ( + Field struct { + Key string `yaml:"key"` + Value interface{} `yaml:"value"` + } + + Fields []Field +) + +func MapToFields(m map[string]any) Fields { + if len(m) == 0 { + return nil + } + fields := make(Fields, 0, len(m)) + for k, v := range m { + fields = append(fields, Field{Key: k, Value: v}) + } + return fields +} diff --git a/pkg/logger/field_test.go b/pkg/logger/field_test.go new file mode 100644 index 00000000..8d9365a8 --- /dev/null +++ b/pkg/logger/field_test.go @@ -0,0 +1,28 @@ +package logger + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Field", func() { + Describe("MapToFields", func() { + It("nil", func() { + fields := MapToFields(nil) + Expect(fields).To(BeNil()) + }) + + It("one", func() { + fields := MapToFields(map[string]any{ + "i": 1, + "f": 1.1, + "s": "str", + }) + Expect(fields).To(ConsistOf( + Field{Key: "i", Value: 1}, + Field{Key: "f", Value: 1.1}, + Field{Key: "s", Value: "str"}, + )) + }) + }) +}) diff --git a/pkg/logger/level.go b/pkg/logger/level.go new file mode 100644 index 00000000..72fcb7d6 --- /dev/null +++ b/pkg/logger/level.go @@ -0,0 +1,45 @@ +package logger + +import "strings" + +type Level int8 + +const ( + DebugLevel Level = iota - 1 + InfoLevel + WarnLevel + ErrorLevel + PanicLevel + FatalLevel +) + +var ( + levelMap = map[Level]string{ + DebugLevel: "DEBUG", + InfoLevel: "INFO", + WarnLevel: "WARN", + ErrorLevel: "ERROR", + PanicLevel: "PANIC", + FatalLevel: "FATAL", + } + levelRevMap = map[string]Level{ + "DEBUG": DebugLevel, + "INFO": InfoLevel, + "WARN": WarnLevel, + "ERROR": ErrorLevel, + "PANIC": PanicLevel, + "FATAL": FatalLevel, + } +) + +func ParseLevel(text string) Level { + level, ok := levelRevMap[strings.ToUpper(text)] + if !ok { + level = InfoLevel + } + return level +} + +func (lvl Level) String() string { + return levelMap[lvl] +} diff --git a/pkg/logger/level_test.go b/pkg/logger/level_test.go new file mode 100644 index 00000000..cde10ab0 --- /dev/null +++ b/pkg/logger/level_test.go @@ -0,0 +1,31 @@ +package logger + +import ( + "strings" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Level", func() { + DescribeTable("cases", + func(text string, lvl Level) { + l := ParseLevel(text) + Expect(l).To(Equal(lvl)) + if text == "" { + text = "info" + } + Expect(l.String()).To(Equal(strings.ToUpper(text))) + }, + EntryDescription("%[1]s"), + Entry(nil, "", InfoLevel), + Entry(nil, "debug", DebugLevel), + Entry(nil, "info", InfoLevel), + Entry(nil, "Info", InfoLevel), + Entry(nil, "INFO", InfoLevel), + Entry(nil, "warn", WarnLevel), + Entry(nil, "error", ErrorLevel), + Entry(nil, "panic", PanicLevel), + Entry(nil, "fatal", FatalLevel), + ) +}) diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index 870fddcd..7539faa4 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -1,118 +1,34 @@ package logger -import ( - "fmt" - "io" - "log" - "os" - "path/filepath" - "runtime" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" -) - -type Logger interface { - Info(v ...interface{}) - Infof(format string, v ...interface{}) - Warn(v ...interface{}) - Warnf(format string, v ...interface{}) - Error(v ...interface{}) - Errorf(format string, v ...interface{}) - Fatal(v ...interface{}) - Fatalf(format string, v ...interface{}) -} - -var Log Logger = NewRunnerLogger("") - -func SetLogger(l Logger) { - Log = l -} - -// RunnerLogger TODO: Need to optimize it -type RunnerLogger struct { - logger *log.Logger -} - -func NewRunnerLogger(path string) *RunnerLogger { - var w io.Writer = os.Stdout - if path != "" { - file := base.MustCreateFile(path) - w = io.MultiWriter(file, os.Stdout) - } - logger := log.New(w, "", log.LstdFlags) - r := new(RunnerLogger) - r.logger = logger - return r -} - -func (r *RunnerLogger) Info(v ...interface{}) { - r.infoWithSkip(2, fmt.Sprint(v...)) -} - -func (r *RunnerLogger) Infof(format string, v ...interface{}) { - r.infoWithSkip(2, fmt.Sprintf(format, v...)) -} - -func (r *RunnerLogger) Warn(v ...interface{}) { - r.warnWithSkip(2, fmt.Sprint(v...)) -} - -func (r 
*RunnerLogger) Warnf(format string, v ...interface{}) { - r.warnWithSkip(2, fmt.Sprintf(format, v...)) -} - -func (r *RunnerLogger) Error(v ...interface{}) { - r.errorWithSkip(2, fmt.Sprint(v...)) -} - -func (r *RunnerLogger) Errorf(format string, v ...interface{}) { - r.errorWithSkip(2, fmt.Sprintf(format, v...)) -} - -func (r *RunnerLogger) Fatal(v ...interface{}) { - r.fatalWithSkip(2, fmt.Sprint(v...)) -} - -func (r *RunnerLogger) Fatalf(format string, v ...interface{}) { - r.fatalWithSkip(2, fmt.Sprintf(format, v...)) -} - -func (r *RunnerLogger) infoWithSkip(skip int, msg string) { - _, file, no, ok := runtime.Caller(skip) - if ok { - file = filepath.Base(file) - r.logger.Printf("[INFO] %s:%d: %s", file, no, msg) - } else { - r.logger.Fatalf("Fail to get caller info of logger.Log.Info") +type ( + Logger interface { + SkipCaller(skip int) Logger + + With(fields ...Field) Logger + WithError(err error) Logger + + Debug(msg string, fields ...Field) + Info(msg string, fields ...Field) + Warn(msg string, fields ...Field) + Error(msg string, fields ...Field) + Fatal(msg string, fields ...Field) + Panic(msg string, fields ...Field) + + Sync() error + Close() error } -} +) -func (r *RunnerLogger) warnWithSkip(skip int, msg string) { - _, file, no, ok := runtime.Caller(skip) - if ok { - file = filepath.Base(file) - r.logger.Printf("[WARN] %s:%d: %s", file, no, msg) - } else { - r.logger.Fatalf("Fail to get caller info of logger.Log.Warn") - } -} +func New(opts ...Option) (Logger, error) { + o := defaultOptions -func (r *RunnerLogger) errorWithSkip(skip int, msg string) { - _, file, no, ok := runtime.Caller(skip) - if ok { - file = filepath.Base(file) - r.logger.Printf("[ERROR] %s:%d: %s", file, no, msg) - } else { - r.logger.Fatalf("Fail to get caller info of logger.Log.Error") + for _, opt := range opts { + opt(&o) } -} -func (r *RunnerLogger) fatalWithSkip(skip int, msg string) { - _, file, no, ok := runtime.Caller(skip) - if ok { - file = filepath.Base(file) - r.logger.Fatalf("[FATAL] %s:%d: %s", file, no, msg) - } else { - r.logger.Fatalf("Fail to get caller info of logger.Fatal") + l, err := newZapLogger(&o) + if err != nil { + return nil, err } + return l, nil } diff --git a/pkg/logger/logger_suite_test.go b/pkg/logger/logger_suite_test.go new file mode 100644 index 00000000..2226fc29 --- /dev/null +++ b/pkg/logger/logger_suite_test.go @@ -0,0 +1,13 @@ +package logger + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestLogger(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg logger Suite") +} diff --git a/pkg/logger/logger_test.go b/pkg/logger/logger_test.go new file mode 100644 index 00000000..e41f0e09 --- /dev/null +++ b/pkg/logger/logger_test.go @@ -0,0 +1,23 @@ +package logger + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Level", func() { + It("New", func() { + l, err := New(WithConsole(false)) + Expect(err).NotTo(HaveOccurred()) + Expect(l).NotTo(BeNil()) + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + err = l.Close() + Expect(err).NotTo(HaveOccurred()) + }) + It("New failed", func() { + l, err := New(WithConsole(false), WithFiles("not-exists/1.log")) + Expect(err).To(HaveOccurred()) + Expect(l).To(BeNil()) + }) +}) diff --git a/pkg/logger/nop.go b/pkg/logger/nop.go new file mode 100644 index 00000000..bde62d2d --- /dev/null +++ b/pkg/logger/nop.go @@ -0,0 +1,21 @@ +package logger + +var NopLogger Logger = nopLogger{} + +type nopLogger struct{} + +//revive:disable:empty-lines + +func (l nopLogger) SkipCaller(int) Logger { return l } +func (l nopLogger) With(...Field) Logger { return l } +func (l nopLogger) WithError(error) Logger { return l } +func (nopLogger) Debug(string, ...Field) {} +func (nopLogger) Info(string, ...Field) {} +func (nopLogger) Warn(string, ...Field) {} +func (nopLogger) Error(string, ...Field) {} +func (nopLogger) Fatal(string, ...Field) {} +func (nopLogger) Panic(string, ...Field) {} +func (nopLogger) Sync() error { return nil } +func (nopLogger) Close() error { return nil } + +//revive:enable:empty-lines diff --git a/pkg/logger/nop_test.go b/pkg/logger/nop_test.go new file mode 100644 index 00000000..e0ef4196 --- /dev/null +++ b/pkg/logger/nop_test.go @@ -0,0 +1,28 @@ +package logger + +import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("nopLogger", func() { + It("nopLogger", func() { + var ( + l Logger + err error + ) + l = nopLogger{} + l = l.SkipCaller(1).With().WithError(nil) + l.Debug("") + l.Info("") + l.Warn("") + l.Error("") + l.Panic("") + l.Fatal("") + Expect(l).NotTo(BeNil()) + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + err = l.Close() + Expect(err).NotTo(HaveOccurred()) + }) +}) diff --git a/pkg/logger/option.go b/pkg/logger/option.go new file mode 100644 index 00000000..a09ef175 --- /dev/null +++ b/pkg/logger/option.go @@ -0,0 +1,56 @@ +package logger + +import "time" + +var defaultOptions = options{ + level: InfoLevel, + console: true, + timeLayout: time.RFC3339, +} + +type ( + options struct { + level Level + fields Fields + console bool + timeLayout string + files []string + } + Option func(*options) +) + +func WithLevel(lvl Level) Option { + return func(o *options) { + o.level = lvl + } +} + +func WithLevelText(text string) Option { + return func(o *options) { + WithLevel(ParseLevel(text))(o) + } +} + +func WithFields(fields ...Field) Option { + return func(o *options) { + o.fields = fields + } +} + +func WithConsole(console bool) Option { + return func(o *options) { + o.console = console + } +} + +func WithTimeLayout(layout string) Option { + return func(o *options) { + o.timeLayout = layout + } +} + +func WithFiles(files ...string) Option { + return func(o *options) { + o.files = files + } +} diff --git a/pkg/logger/option_test.go b/pkg/logger/option_test.go new file mode 100644 index 00000000..b909b7d5 --- /dev/null +++ b/pkg/logger/option_test.go @@ -0,0 +1,62 @@ +package logger + +import ( + "time" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Option", func() { + It("WithLevel", func() { + o := options{ + level: DebugLevel, + } + WithLevel(WarnLevel)(&o) + Expect(o.level).To(Equal(WarnLevel)) + }) + + It("WithLevelText", func() { + o := options{ + level: DebugLevel, + } + WithLevelText("")(&o) + Expect(o.level).To(Equal(InfoLevel)) + }) + + It("WithFields", func() { + fields := Fields{ + {Key: "i", Value: 1}, + {Key: "f", Value: 1.1}, + {Key: "s", Value: "str"}, + } + o := options{} + WithFields(fields...)(&o) + Expect(o.fields).To(Equal(fields)) + }) + + It("WithConsole", func() { + o := options{} + WithConsole(true)(&o) + Expect(o.console).To(Equal(true)) + }) + + It("WithConsole", func() { + o := options{} + WithTimeLayout(time.RFC3339)(&o) + Expect(o.timeLayout).To(Equal(time.RFC3339)) + }) + + It("nopLogger", func() { + files := []string{"f1", "f2"} + o := options{} + WithFiles(files...)(&o) + Expect(o.files).To(Equal(files)) + }) + + It("nopLogger", func() { + o := options{} + WithConsole(true)(&o) + Expect(o.console).To(BeTrue()) + }) +}) diff --git a/pkg/logger/zap.go b/pkg/logger/zap.go new file mode 100644 index 00000000..5478b119 --- /dev/null +++ b/pkg/logger/zap.go @@ -0,0 +1,148 @@ +package logger + +import ( + "os" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +type zapLogger struct { + l *zap.Logger + cleanup func() +} + +func newZapLogger(o *options) (*zapLogger, error) { + l := &zapLogger{} + + atomicLevel := zap.NewAtomicLevelAt(toZapLevel(o.level)) + + var cores []zapcore.Core + encoderCfg := zap.NewProductionEncoderConfig() + if o.timeLayout != "" { + encoderCfg.EncodeTime = zapcore.TimeEncoderOfLayout(o.timeLayout) + } + if o.console { + cores = append(cores, + zapcore.NewCore( + zapcore.NewJSONEncoder(encoderCfg), + zapcore.Lock(os.Stdout), + atomicLevel, + ), + ) + } + if len(o.files) > 0 { + sink, cleanup, err := zap.Open(o.files...) + if err != nil { + return nil, err + } + l.cleanup = cleanup + cores = append(cores, + zapcore.NewCore( + zapcore.NewJSONEncoder(encoderCfg), + sink, + atomicLevel, + ), + ) + } + + l.l = zap.New( + zapcore.NewTee(cores...), + zap.AddCaller(), + zap.AddCallerSkip(1), + zap.Fields(toZapFields(o.fields...)...), + ) + + return l, nil +} + +func (l *zapLogger) SkipCaller(skip int) Logger { + cpy := l.clone() + cpy.l = cpy.l.WithOptions(zap.AddCallerSkip(skip)) + return cpy +} + +func (l *zapLogger) With(fields ...Field) Logger { + cpy := l.clone() + cpy.l = cpy.l.With(toZapFields(fields...)...) + return cpy +} + +func (l *zapLogger) WithError(err error) Logger { + cpy := l.clone() + cpy.l = cpy.l.With(zap.Error(err)) + return cpy +} + +func (l *zapLogger) Debug(msg string, fields ...Field) { + l.l.Debug(msg, toZapFields(fields...)...) +} + +func (l *zapLogger) Info(msg string, fields ...Field) { + l.l.Info(msg, toZapFields(fields...)...) +} + +func (l *zapLogger) Warn(msg string, fields ...Field) { + l.l.Warn(msg, toZapFields(fields...)...) +} + +func (l *zapLogger) Error(msg string, fields ...Field) { + l.l.Error(msg, toZapFields(fields...)...) +} + +func (l *zapLogger) Fatal(msg string, fields ...Field) { + l.l.Fatal(msg, toZapFields(fields...)...) +} + +func (l *zapLogger) Panic(msg string, fields ...Field) { + l.l.Panic(msg, toZapFields(fields...)...) 
+} + +func (l *zapLogger) Sync() error { + return l.l.Sync() +} + +func (l *zapLogger) Close() error { + if l.cleanup != nil { + defer l.cleanup() + } + //revive:disable-next-line:if-return + if err := l.Sync(); err != nil { + return err + } + return nil +} + +func (l *zapLogger) clone() *zapLogger { + cpy := *l + return &cpy +} + +func toZapFields(fields ...Field) []zap.Field { + if len(fields) == 0 { + return nil + } + zapFields := make([]zap.Field, len(fields)) + for i, field := range fields { + zapFields[i] = zap.Any(field.Key, field.Value) + } + return zapFields +} + +func toZapLevel(lvl Level) zapcore.Level { + switch lvl { + case DebugLevel: + return zap.DebugLevel + case InfoLevel: + return zap.InfoLevel + case WarnLevel: + return zap.WarnLevel + case ErrorLevel: + return zap.ErrorLevel + case PanicLevel: + return zap.PanicLevel + case FatalLevel: + return zap.FatalLevel + } + return zap.InfoLevel +} diff --git a/pkg/logger/zap_test.go b/pkg/logger/zap_test.go new file mode 100644 index 00000000..15bcfa65 --- /dev/null +++ b/pkg/logger/zap_test.go @@ -0,0 +1,348 @@ +package logger + +import ( + "encoding/json" + stderrors "errors" + "os" + "path/filepath" + "time" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +var _ = Describe("zapLogger", func() { + Describe("logs", func() { + var ( + tmpdir string + file1 string + file2 string + l Logger + ) + BeforeEach(func() { + var err error + tmpdir, err = os.MkdirTemp("", "test") + Expect(err).NotTo(HaveOccurred()) + file1 = filepath.Join(tmpdir, "1.log") + file2 = filepath.Join(tmpdir, "2.log") + var zl *zapLogger + zl, err = newZapLogger(&options{ + level: InfoLevel, + fields: Fields{{Key: "key1", Value: "value1"}, {Key: "key2", Value: "value2"}}, + console: false, + files: []string{file1, file2}, + }) + l = zl + // Set fatal hook to prevent exit + zl.l = zl.l.WithOptions(zap.WithFatalHook(zapcore.WriteThenPanic)) + Expect(err).NotTo(HaveOccurred()) + Expect(l).NotTo(BeNil()) + }) + AfterEach(func() { + var err error + Expect(l).NotTo(BeNil()) + err = l.Close() + Expect(err).NotTo(HaveOccurred()) + err = os.RemoveAll(tmpdir) + Expect(err).NotTo(HaveOccurred()) + }) + + It("debug", func() { + var ( + err error + content1, content2 []byte + ) + l.Debug("debug message") + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(BeEmpty()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content2).To(BeEmpty()) + }) + + It("info", func() { + var ( + err error + content1, content2 []byte + ) + l.Info("info message") + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + Expect(err).NotTo(HaveOccurred()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(Equal(content2)) + m := map[string]any{} + err = json.Unmarshal(content1, &m) + Expect(err).NotTo(HaveOccurred()) + Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go"))) + Expect(m).To(HaveKeyWithValue("msg", "info message")) + Expect(m).To(HaveKeyWithValue("level", "info")) + Expect(m).To(HaveKeyWithValue("key1", "value1")) + Expect(m).To(HaveKeyWithValue("key2", "value2")) + }) + + It("warn", func() { + var ( + err error + content1, content2 []byte + ) + l.Warn("warn message") + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + 
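+			// both file sinks receive the same JSON line, so the two files are compared byte-for-byte below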
Expect(err).NotTo(HaveOccurred()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(Equal(content2)) + m := map[string]any{} + err = json.Unmarshal(content1, &m) + Expect(err).NotTo(HaveOccurred()) + Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go"))) + Expect(m).To(HaveKeyWithValue("msg", "warn message")) + Expect(m).To(HaveKeyWithValue("level", "warn")) + Expect(m).To(HaveKeyWithValue("key1", "value1")) + Expect(m).To(HaveKeyWithValue("key2", "value2")) + }) + + It("error", func() { + var ( + err error + content1, content2 []byte + ) + l.Error("error message") + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + Expect(err).NotTo(HaveOccurred()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(Equal(content2)) + m := map[string]any{} + err = json.Unmarshal(content1, &m) + Expect(err).NotTo(HaveOccurred()) + Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go"))) + Expect(m).To(HaveKeyWithValue("msg", "error message")) + Expect(m).To(HaveKeyWithValue("level", "error")) + Expect(m).To(HaveKeyWithValue("key1", "value1")) + Expect(m).To(HaveKeyWithValue("key2", "value2")) + }) + + It("panic", func() { + var ( + err error + content1, content2 []byte + ) + done := make(chan struct{}) + go func() { + defer func() { + r := recover() + Expect(r).NotTo(BeNil()) + done <- struct{}{} + }() + l.Panic("panic message") + }() + <-done + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + Expect(err).NotTo(HaveOccurred()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(Equal(content2)) + m := map[string]any{} + err = json.Unmarshal(content1, &m) + Expect(err).NotTo(HaveOccurred()) + Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go"))) + Expect(m).To(HaveKeyWithValue("msg", "panic message")) + Expect(m).To(HaveKeyWithValue("level", "panic")) + Expect(m).To(HaveKeyWithValue("key1", "value1")) + Expect(m).To(HaveKeyWithValue("key2", "value2")) + }) + + It("fatal", func() { + var ( + err error + content1, content2 []byte + ) + done := make(chan struct{}) + go func() { + defer func() { + r := recover() + Expect(r).NotTo(BeNil()) + done <- struct{}{} + }() + l.Fatal("fatal message") + }() + <-done + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + Expect(err).NotTo(HaveOccurred()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(Equal(content2)) + m := map[string]any{} + err = json.Unmarshal(content1, &m) + Expect(err).NotTo(HaveOccurred()) + Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go"))) + Expect(m).To(HaveKeyWithValue("msg", "fatal message")) + Expect(m).To(HaveKeyWithValue("level", "fatal")) + Expect(m).To(HaveKeyWithValue("key1", "value1")) + Expect(m).To(HaveKeyWithValue("key2", "value2")) + }) + + It("SkipCaller", func() { + var ( + err error + content1, content2 []byte + ) + l.SkipCaller(-1).Info("info message") + err = l.Sync() + Expect(err).NotTo(HaveOccurred()) + content1, err = os.ReadFile(file1) + Expect(err).NotTo(HaveOccurred()) + content2, err = os.ReadFile(file2) + Expect(err).NotTo(HaveOccurred()) + Expect(content1).To(Equal(content2)) + m := map[string]any{} + err = json.Unmarshal(content1, &m) + Expect(err).NotTo(HaveOccurred()) + Expect(m).To(HaveKeyWithValue("caller", 
Not(ContainSubstring("logger/zap_test.go"))))
+			Expect(m).To(HaveKeyWithValue("msg", "info message"))
+			Expect(m).To(HaveKeyWithValue("level", "info"))
+			Expect(m).To(HaveKeyWithValue("key1", "value1"))
+			Expect(m).To(HaveKeyWithValue("key2", "value2"))
+		})
+
+		It("With", func() {
+			var (
+				err                error
+				content1, content2 []byte
+			)
+			l = l.With(Field{Key: "key3", Value: "value3"}, Field{Key: "key4", Value: "value4"})
+			l.Info("info message")
+			err = l.Sync()
+			Expect(err).NotTo(HaveOccurred())
+			content1, err = os.ReadFile(file1)
+			Expect(err).NotTo(HaveOccurred())
+			content2, err = os.ReadFile(file2)
+			Expect(err).NotTo(HaveOccurred())
+			Expect(content1).To(Equal(content2))
+			m := map[string]any{}
+			err = json.Unmarshal(content1, &m)
+			Expect(err).NotTo(HaveOccurred())
+			Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go")))
+			Expect(m).To(HaveKeyWithValue("msg", "info message"))
+			Expect(m).To(HaveKeyWithValue("level", "info"))
+			Expect(m).To(HaveKeyWithValue("key1", "value1"))
+			Expect(m).To(HaveKeyWithValue("key2", "value2"))
+			Expect(m).To(HaveKeyWithValue("key3", "value3"))
+			Expect(m).To(HaveKeyWithValue("key4", "value4"))
+		})
+
+		It("WithError", func() {
+			var (
+				err                error
+				content1, content2 []byte
+			)
+			l.WithError(stderrors.New("test error")).Info("info message")
+			err = l.Sync()
+			Expect(err).NotTo(HaveOccurred())
+			content1, err = os.ReadFile(file1)
+			Expect(err).NotTo(HaveOccurred())
+			content2, err = os.ReadFile(file2)
+			Expect(err).NotTo(HaveOccurred())
+			Expect(content1).To(Equal(content2))
+			m := map[string]any{}
+			err = json.Unmarshal(content1, &m)
+			Expect(err).NotTo(HaveOccurred())
+			Expect(m).To(HaveKeyWithValue("caller", ContainSubstring("logger/zap_test.go")))
+			Expect(m).To(HaveKeyWithValue("msg", "info message"))
+			Expect(m).To(HaveKeyWithValue("level", "info"))
+			Expect(m).To(HaveKeyWithValue("key1", "value1"))
+			Expect(m).To(HaveKeyWithValue("key2", "value2"))
+			Expect(m).To(HaveKeyWithValue("error", "test error"))
+		})
+	})
+
+	DescribeTable("toZapLevel",
+		func(lvl Level, zapLvl zapcore.Level) {
+			Expect(toZapLevel(lvl)).To(Equal(zapLvl))
+		},
+		EntryDescription("%[1]s"),
+		Entry(nil, DebugLevel-1, zap.InfoLevel),
+		Entry(nil, DebugLevel, zap.DebugLevel),
+		Entry(nil, InfoLevel, zap.InfoLevel),
+		Entry(nil, WarnLevel, zap.WarnLevel),
+		Entry(nil, ErrorLevel, zap.ErrorLevel),
+		Entry(nil, PanicLevel, zap.PanicLevel),
+		Entry(nil, FatalLevel, zap.FatalLevel),
+	)
+
+	It("enable console", func() {
+		var (
+			l   Logger
+			err error
+		)
+		l, err = newZapLogger(&options{
+			console: true,
+		})
+		Expect(err).NotTo(HaveOccurred())
+		Expect(l).NotTo(BeNil())
+		_ = l.Close()
+	})
+
+	It("with time layout", func() {
+		var (
+			l   Logger
+			err error
+		)
+		l, err = newZapLogger(&options{
+			timeLayout: time.RFC3339,
+		})
+		Expect(err).NotTo(HaveOccurred())
+		Expect(l).NotTo(BeNil())
+		_ = l.Close()
+	})
+
+	It("sync failed after close", func() {
+		var (
+			l      Logger
+			tmpdir string
+			err    error
+		)
+		tmpdir, err = os.MkdirTemp("", "test")
+		Expect(err).NotTo(HaveOccurred())
+		defer func() {
+			err = os.RemoveAll(tmpdir)
+			Expect(err).NotTo(HaveOccurred())
+		}()
+
+		l, err = newZapLogger(&options{
+			files: []string{filepath.Join(tmpdir, "1.log")},
+		})
+		Expect(err).NotTo(HaveOccurred())
+		Expect(l).NotTo(BeNil())
+
+		err = l.Close()
+		Expect(err).NotTo(HaveOccurred())
+
+		err = l.Sync()
+		Expect(err).To(HaveOccurred())
+	})
+
+	It("open file error", func() {
+		l, err := newZapLogger(&options{
+			files: []string{"not-exists/1.log"},
+		})
+
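+		// the parent directory does not exist, so newZapLogger must return the zap.Open error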
Expect(err).To(HaveOccurred()) + Expect(l).To(BeNil()) + }) +}) diff --git a/pkg/manager/hooks.go b/pkg/manager/hooks.go new file mode 100644 index 00000000..93b18cc6 --- /dev/null +++ b/pkg/manager/hooks.go @@ -0,0 +1,22 @@ +package manager + +import "time" + +const ( + BeforeHook = HookName("before") + AfterHook = HookName("after") +) + +type ( + Hooks struct { + Before []*Hook `yaml:"before,omitempty"` + After []*Hook `yaml:"after,omitempty"` + } + + HookName string + + Hook struct { + Statements []string `yaml:"statements"` + Wait time.Duration `yaml:"wait,omitempty"` + } +) diff --git a/pkg/manager/manager.go b/pkg/manager/manager.go new file mode 100644 index 00000000..0866ad85 --- /dev/null +++ b/pkg/manager/manager.go @@ -0,0 +1,417 @@ +//go:generate mockgen -source=manager.go -destination manager_mock.go -package manager Manager +package manager + +import ( + "fmt" + "io" + "sync" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/importer" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/reader" + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" + "github.com/vesoft-inc/nebula-importer/v4/pkg/stats" + + "github.com/panjf2000/ants" +) + +const ( + DefaultReaderConcurrency = 50 + DefaultImporterConcurrency = 512 + DefaultStatsInterval = time.Second * 10 +) + +type ( + Manager interface { + Import(s source.Source, brr reader.BatchRecordReader, importers ...importer.Importer) error + Start() error + Wait() error + Stats() *stats.Stats + Stop() error + } + + defaultManager struct { + graphName string + pool client.Pool + getClientOptions []client.Option + stats *stats.ConcurrencyStats + batch int + readerConcurrency int + readerWaitGroup sync.WaitGroup + readerPool *ants.Pool + importerConcurrency int + importerWaitGroup sync.WaitGroup + importerPool *ants.Pool + statsInterval time.Duration + hooks *Hooks + chStart chan struct{} + done chan struct{} + logger logger.Logger + } + + Option func(*defaultManager) +) + +func New(pool client.Pool, opts ...Option) Manager { + options := make([]Option, 0, 1+len(opts)) + options = append(options, WithClientPool(pool)) + options = append(options, opts...) + return NewWithOpts(options...) 
+} + +func NewWithOpts(opts ...Option) Manager { + m := &defaultManager{ + stats: stats.NewConcurrencyStats(), + readerConcurrency: DefaultReaderConcurrency, + importerConcurrency: DefaultImporterConcurrency, + statsInterval: DefaultStatsInterval, + hooks: &Hooks{}, + chStart: make(chan struct{}), + done: make(chan struct{}), + } + + for _, opt := range opts { + opt(m) + } + + m.readerPool, _ = ants.NewPool(m.readerConcurrency) + m.importerPool, _ = ants.NewPool(m.importerConcurrency) + + if m.logger == nil { + m.logger = logger.NopLogger + } + + return m +} + +func WithGraphName(graphName string) Option { + return func(m *defaultManager) { + m.graphName = graphName + } +} + +func WithClientPool(pool client.Pool) Option { + return func(m *defaultManager) { + m.pool = pool + } +} + +func WithGetClientOptions(opts ...client.Option) Option { + return func(m *defaultManager) { + m.getClientOptions = opts + } +} + +func WithBatch(batch int) Option { + return func(m *defaultManager) { + if batch > 0 { + m.batch = batch + } + } +} +func WithReaderConcurrency(concurrency int) Option { + return func(m *defaultManager) { + if concurrency > 0 { + m.readerConcurrency = concurrency + } + } +} + +func WithImporterConcurrency(concurrency int) Option { + return func(m *defaultManager) { + if concurrency > 0 { + m.importerConcurrency = concurrency + } + } +} + +func WithStatsInterval(statsInterval time.Duration) Option { + return func(m *defaultManager) { + if statsInterval > 0 { + m.statsInterval = statsInterval + } + } +} + +func WithBeforeHooks(hooks ...*Hook) Option { + return func(m *defaultManager) { + m.hooks.Before = hooks + } +} + +func WithAfterHooks(hooks ...*Hook) Option { + return func(m *defaultManager) { + m.hooks.After = hooks + } +} + +func WithLogger(l logger.Logger) Option { + return func(m *defaultManager) { + m.logger = l + } +} + +func (m *defaultManager) Import(s source.Source, brr reader.BatchRecordReader, importers ...importer.Importer) error { + if len(importers) == 0 { + return nil + } + + logSourceField := logger.Field{Key: "source", Value: s.Name()} + + if err := s.Open(); err != nil { + err = errors.NewImportError(err, "manager: open import source failed").SetGraphName(m.graphName) + m.logError(err, "", logSourceField) + return err + } + + nBytes, err := s.Size() + if err != nil { + _ = s.Close() + err = errors.NewImportError(err, "manager: get size of import source failed").SetGraphName(m.graphName) + m.logError(err, "", logSourceField) + return err + } + m.stats.AddTotalBytes(nBytes) + + m.readerWaitGroup.Add(1) + cleanup := func() { + for _, i := range importers { + i.Done() + } + m.readerWaitGroup.Done() + s.Close() + } + + go func() { + err = m.readerPool.Submit(func() { + <-m.chStart + defer cleanup() + + for _, i := range importers { + i.Wait() + } + _ = m.loopImport(s, brr, importers...) 
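+			// loopImport logs its own errors, so the returned error is intentionally dropped here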
+ }) + if err != nil { + cleanup() + m.logError(err, "manager: submit reader failed", logSourceField) + } + }() + + m.logger.Info("manager: add import source successfully", logSourceField) + return nil +} + +func (m *defaultManager) Start() error { + m.logger.Info("manager: starting") + + if err := m.Before(); err != nil { + return err + } + + m.stats.Init() + + if err := m.pool.Open(); err != nil { + m.logger.WithError(err).Error("manager: start client pool failed") + return err + } + + close(m.chStart) + + go m.loopPrintStats() + m.logger.Info("manager: start successfully") + return nil +} + +func (m *defaultManager) Wait() error { + m.logger.Info("manager: wait") + + m.readerWaitGroup.Wait() + m.importerWaitGroup.Wait() + + m.logStats() + + if err := m.After(); err != nil { + return err + } + + m.logger.Info("manager: wait successfully") + return nil +} + +func (m *defaultManager) Stats() *stats.Stats { + return m.stats.Stats() +} + +func (m *defaultManager) Stop() (err error) { + m.logger.Info("manager: stop") + defer func() { + if err != nil { + err = errors.NewImportError(err, "manager: stop failed") + m.logError(err, "") + } else { + m.logger.Info("manager: stop successfully") + } + }() + close(m.done) + return m.Wait() +} + +func (m *defaultManager) Before() error { + m.logger.Info("manager: exec before hook") + return m.execHooks(BeforeHook) +} + +func (m *defaultManager) After() error { + m.logger.Info("manager: exec after hook") + return m.execHooks(AfterHook) +} + +func (m *defaultManager) execHooks(name HookName) error { + var hooks []*Hook + switch name { + case BeforeHook: + hooks = m.hooks.Before + case AfterHook: + hooks = m.hooks.After + } + if len(hooks) == 0 { + return nil + } + + var cli client.Client + for _, hook := range hooks { + if hook == nil { + continue + } + for _, statement := range hook.Statements { + if statement == "" { + continue + } + + if cli == nil { + var err error + cli, err = m.pool.GetClient(m.getClientOptions...) + if err != nil { + return err + } + } + resp, err := cli.Execute(statement) + if err != nil { + err = errors.NewImportError(err, + "manager: exec failed in %s hook", name, + ).SetStatement(statement) + m.logError(err, "") + return err + } + if !resp.IsSucceed() { + err = errors.NewImportError(err, + "manager: exec failed in %s hook, %s", name, resp.GetError(), + ).SetStatement(statement) + m.logError(err, "") + return err + } + } + if hook.Wait != 0 { + m.logger.Info(fmt.Sprintf("manager: waiting %s", hook.Wait)) + time.Sleep(hook.Wait) + } + } + return nil +} + +func (m *defaultManager) loopImport(s source.Source, r reader.BatchRecordReader, importers ...importer.Importer) error { + logSourceField := logger.Field{Key: "source", Value: s.Name()} + for { + select { + case <-m.done: + return nil + default: + nBytes, records, err := r.ReadBatch() + if err != nil { + if err != io.EOF { + err = errors.NewImportError(err, "manager: read batch failed").SetGraphName(m.graphName) + m.logError(err, "", logSourceField) + return err + } + return nil + } + m.submitImporterTask(nBytes, records, importers...) + } + } +} + +func (m *defaultManager) submitImporterTask(nBytes int, records spec.Records, importers ...importer.Importer) { + m.importerWaitGroup.Add(1) + if err := m.importerPool.Submit(func() { + defer m.importerWaitGroup.Done() + var isFailed bool + if len(records) > 0 { + for _, i := range importers { + result, err := i.Import(records...) 
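+					// result carries the per-request latency and response time consumed by the stats callbacks below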
+ if err != nil { + m.logError(err, "manager: import failed") + m.onRequestFailed(records) + isFailed = true + // do not return, continue the subsequent importer. + } else { + m.onRequestSucceeded(records, result) + } + } + } + if isFailed { + m.onFailed(nBytes, records) + } else { + m.onSucceeded(nBytes, records) + } + }); err != nil { + m.importerWaitGroup.Done() + m.logError(err, "manager: submit importer failed") + } +} + +func (m *defaultManager) loopPrintStats() { + if m.statsInterval <= 0 { + return + } + ticker := time.NewTicker(m.statsInterval) + m.logStats() + for { + select { + case <-ticker.C: + m.logStats() + case <-m.done: + return + } + } +} + +func (m *defaultManager) logStats() { + m.logger.Info(m.Stats().String()) +} + +func (m *defaultManager) onFailed(nBytes int, records spec.Records) { + m.stats.Failed(int64(nBytes), int64(len(records))) +} + +func (m *defaultManager) onSucceeded(nBytes int, records spec.Records) { + m.stats.Succeeded(int64(nBytes), int64(len(records))) +} + +func (m *defaultManager) onRequestFailed(records spec.Records) { + m.stats.RequestFailed(int64(len(records))) +} + +func (m *defaultManager) onRequestSucceeded(records spec.Records, result *importer.ImportResp) { + m.stats.RequestSucceeded(int64(len(records)), result.Latency, result.RespTime) +} + +func (m *defaultManager) logError(err error, msg string, fields ...logger.Field) { + e := errors.AsOrNewImportError(err) + fields = append(fields, logger.MapToFields(e.Fields())...) + m.logger.SkipCaller(1).WithError(e.Cause()).Error(msg, fields...) +} diff --git a/pkg/manager/manager_mock.go b/pkg/manager/manager_mock.go new file mode 100644 index 00000000..b795669a --- /dev/null +++ b/pkg/manager/manager_mock.go @@ -0,0 +1,113 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: manager.go + +// Package manager is a generated GoMock package. +package manager + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" + importer "github.com/vesoft-inc/nebula-importer/v4/pkg/importer" + reader "github.com/vesoft-inc/nebula-importer/v4/pkg/reader" + source "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + stats "github.com/vesoft-inc/nebula-importer/v4/pkg/stats" +) + +// MockManager is a mock of Manager interface. +type MockManager struct { + ctrl *gomock.Controller + recorder *MockManagerMockRecorder +} + +// MockManagerMockRecorder is the mock recorder for MockManager. +type MockManagerMockRecorder struct { + mock *MockManager +} + +// NewMockManager creates a new mock instance. +func NewMockManager(ctrl *gomock.Controller) *MockManager { + mock := &MockManager{ctrl: ctrl} + mock.recorder = &MockManagerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockManager) EXPECT() *MockManagerMockRecorder { + return m.recorder +} + +// Import mocks base method. +func (m *MockManager) Import(s source.Source, brr reader.BatchRecordReader, importers ...importer.Importer) error { + m.ctrl.T.Helper() + varargs := []interface{}{s, brr} + for _, a := range importers { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Import", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// Import indicates an expected call of Import. +func (mr *MockManagerMockRecorder) Import(s, brr interface{}, importers ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{s, brr}, importers...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Import", reflect.TypeOf((*MockManager)(nil).Import), varargs...) +} + +// Start mocks base method. +func (m *MockManager) Start() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Start") + ret0, _ := ret[0].(error) + return ret0 +} + +// Start indicates an expected call of Start. +func (mr *MockManagerMockRecorder) Start() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Start", reflect.TypeOf((*MockManager)(nil).Start)) +} + +// Stats mocks base method. +func (m *MockManager) Stats() *stats.Stats { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Stats") + ret0, _ := ret[0].(*stats.Stats) + return ret0 +} + +// Stats indicates an expected call of Stats. +func (mr *MockManagerMockRecorder) Stats() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockManager)(nil).Stats)) +} + +// Stop mocks base method. +func (m *MockManager) Stop() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Stop") + ret0, _ := ret[0].(error) + return ret0 +} + +// Stop indicates an expected call of Stop. +func (mr *MockManagerMockRecorder) Stop() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stop", reflect.TypeOf((*MockManager)(nil).Stop)) +} + +// Wait mocks base method. +func (m *MockManager) Wait() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Wait") + ret0, _ := ret[0].(error) + return ret0 +} + +// Wait indicates an expected call of Wait. +func (mr *MockManagerMockRecorder) Wait() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Wait", reflect.TypeOf((*MockManager)(nil).Wait)) +} diff --git a/pkg/manager/manager_suite_test.go b/pkg/manager/manager_suite_test.go new file mode 100644 index 00000000..0f1156c3 --- /dev/null +++ b/pkg/manager/manager_suite_test.go @@ -0,0 +1,13 @@ +package manager + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestManager(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg manager Suite") +} diff --git a/pkg/manager/manager_test.go b/pkg/manager/manager_test.go new file mode 100644 index 00000000..d0a09f8b --- /dev/null +++ b/pkg/manager/manager_test.go @@ -0,0 +1,599 @@ +package manager + +import ( + stderrors "errors" + "io" + "os" + "sync/atomic" + "time" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/client" + "github.com/vesoft-inc/nebula-importer/v4/pkg/importer" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/reader" + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" + + "github.com/golang/mock/gomock" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Manager", func() { + It("New", func() { + m := New(client.NewPool()) + m1, ok := m.(*defaultManager) + Expect(ok).To(BeTrue()) + Expect(m1).NotTo(BeNil()) + Expect(m1.pool).NotTo(BeNil()) + Expect(m1.getClientOptions).To(BeNil()) + Expect(m1.batch).To(Equal(0)) + Expect(m1.readerConcurrency).To(Equal(DefaultReaderConcurrency)) + Expect(m1.readerPool).NotTo(BeNil()) + Expect(m1.importerConcurrency).To(Equal(DefaultImporterConcurrency)) + Expect(m1.importerPool).NotTo(BeNil()) + Expect(m1.statsInterval).To(Equal(DefaultStatsInterval)) + Expect(m1.hooks.Before).To(BeEmpty()) + Expect(m1.hooks.After).To(BeEmpty()) + Expect(m1.logger).NotTo(BeNil()) + }) + + It("NewWithOpts", func() { + m := NewWithOpts( + WithGraphName("graphName"), + WithClientPool(client.NewPool()), + WithGetClientOptions(client.WithClientInitFunc(nil)), + WithBatch(1), + WithReaderConcurrency(DefaultReaderConcurrency+1), + WithImporterConcurrency(DefaultImporterConcurrency+1), + WithStatsInterval(DefaultStatsInterval+1), + WithBeforeHooks(&Hook{ + Statements: []string{"before statements1"}, + Wait: time.Second, + }), + WithAfterHooks(&Hook{ + Statements: []string{"after statements"}, + Wait: time.Second, + }), + WithLogger(logger.NopLogger), + ) + m1, ok := m.(*defaultManager) + Expect(ok).To(BeTrue()) + Expect(m1).NotTo(BeNil()) + Expect(m1.pool).NotTo(BeNil()) + Expect(m1.getClientOptions).NotTo(BeNil()) + Expect(m1.batch).To(Equal(1)) + Expect(m1.readerConcurrency).To(Equal(DefaultReaderConcurrency + 1)) + Expect(m1.readerPool).NotTo(BeNil()) + Expect(m1.importerConcurrency).To(Equal(DefaultImporterConcurrency + 1)) + Expect(m1.importerPool).NotTo(BeNil()) + Expect(m1.statsInterval).To(Equal(DefaultStatsInterval + 1)) + Expect(m1.hooks.Before).To(HaveLen(1)) + Expect(m1.hooks.After).To(HaveLen(1)) + Expect(m1.logger).NotTo(BeNil()) + }) + + Describe("Run", func() { + var ( + tmpdir string + ctrl *gomock.Controller + mockSource *source.MockSource + mockBatchRecordReader *reader.MockBatchRecordReader + mockClient *client.MockClient + mockClientPool *client.MockPool + mockResponse *client.MockResponse + mockImporter *importer.MockImporter + m Manager + batch = 10 + ) + BeforeEach(func() { + var err error + tmpdir, err = os.MkdirTemp("", "test") + Expect(err).NotTo(HaveOccurred()) + + ctrl = gomock.NewController(GinkgoT()) + mockSource = source.NewMockSource(ctrl) + mockBatchRecordReader = reader.NewMockBatchRecordReader(ctrl) + mockClient = client.NewMockClient(ctrl) + mockClientPool = client.NewMockPool(ctrl) + mockResponse = client.NewMockResponse(ctrl) + mockImporter = importer.NewMockImporter(ctrl) + + l, err := logger.New(logger.WithLevel(logger.WarnLevel)) + Expect(err).NotTo(HaveOccurred()) + m = New( + mockClientPool, + WithBatch(batch), + WithLogger(l), + WithBeforeHooks(&Hook{ + Statements: []string{"before statement"}, + Wait: time.Second, + }), + WithAfterHooks(&Hook{ + Statements: []string{"after statement"}, + Wait: time.Second, + }), + ) + }) + + AfterEach(func() { + ctrl.Finish() + err := os.RemoveAll(tmpdir) + Expect(err).NotTo(HaveOccurred()) + }) + + It("concurrency successfully", func() { + var err error + loopCountPreFile := 10 + + fnNewSource := func() source.Source { + mockSource = source.NewMockSource(ctrl) + + mockSource.EXPECT().Name().Times(2).Return("source name") + mockSource.EXPECT().Open().Times(1).Return(nil) + mockSource.EXPECT().Size().Times(1).Return(int64(12345), nil) + mockSource.EXPECT().Close().Times(1).Return(nil) + + return mockSource 
+ } + + fnNewBatchRecordReader := func(count int64) reader.BatchRecordReader { + mockBatchRecordReader = reader.NewMockBatchRecordReader(ctrl) + var currBatchRecordReaderCount int64 + fnReadBatch := func() (int, spec.Records, error) { + if curr := atomic.AddInt64(&currBatchRecordReaderCount, 1); curr > count { + return 0, nil, io.EOF + } + return 11, spec.Records{ + []string{"0123"}, + []string{"4567"}, + []string{"890"}, + }, nil + } + mockBatchRecordReader.EXPECT().ReadBatch().Times(int(count) + 1).DoAndReturn(fnReadBatch) + return mockBatchRecordReader + } + + var ( + batchRecordReaderNodeCount int64 = 1017 + batchRecordReaderEdgeCount int64 = 1037 + ) + + for i := 0; i < loopCountPreFile; i++ { + err = m.Import( + fnNewSource(), + fnNewBatchRecordReader(batchRecordReaderNodeCount), + mockImporter, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + err = m.Import( + fnNewSource(), + fnNewBatchRecordReader(batchRecordReaderEdgeCount), + mockImporter, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + } + + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + + mockClientPool.EXPECT().Open().Return(nil), + + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("after statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + ) + + var executeFailedTimes int64 = 10 + var currExecuteTimes int64 + fnImport := func(records ...spec.Record) (*importer.ImportResp, error) { + curr := atomic.AddInt64(&currExecuteTimes, 1) + if curr%100 == 0 && curr/100 <= executeFailedTimes { + return nil, stderrors.New("import failed") + } + return &importer.ImportResp{ + Latency: 2 * time.Microsecond, + RespTime: 3 * time.Microsecond, + }, nil + } + mockImporter.EXPECT().Wait().Times(loopCountPreFile * 4) + mockImporter.EXPECT().Import(gomock.Any()).AnyTimes().DoAndReturn(fnImport) + mockImporter.EXPECT().Done().Times(loopCountPreFile * 4) + + err = m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + s := m.Stats() + + totalBatches := (batchRecordReaderNodeCount + batchRecordReaderEdgeCount) * int64(loopCountPreFile) + processedBytes := (11*batchRecordReaderNodeCount + 11*batchRecordReaderEdgeCount) * int64(loopCountPreFile) + totalBytes := 12345 * int64(loopCountPreFile) * 2 + totalRecords := totalBatches * 3 + + Expect(s.StartTime.IsZero()).To(BeFalse()) + Expect(s.ProcessedBytes).To(Equal(processedBytes)) + Expect(s.TotalBytes).To(Equal(totalBytes)) + Expect(s.FailedRecords).NotTo(Equal(int64(0))) + Expect(s.FailedRecords).To(BeNumerically("<=", executeFailedTimes*int64(batch))) + Expect(s.TotalRecords).To(Equal(totalRecords)) + Expect(s.FailedRequest).To(Equal(executeFailedTimes)) + Expect(s.TotalRequest).To(Equal(totalBatches * 2)) + Expect(s.TotalLatency).To(Equal(2 * time.Microsecond * time.Duration((totalBatches*2)-executeFailedTimes))) + Expect(s.TotalRespTime).To(Equal(3 * time.Microsecond * time.Duration((totalBatches*2)-executeFailedTimes))) + Expect(s.FailedProcessed).NotTo(Equal(int64(0))) + Expect(s.FailedRecords).To(BeNumerically("<=", executeFailedTimes*int64(batch))) + Expect(s.TotalProcessed).To(Equal(totalRecords * 2)) + }) + + It("Import without importer", func() { + err := m.Import( + mockSource, + mockBatchRecordReader, + ) + Expect(err).NotTo(HaveOccurred()) + }) 
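
[Editor's note] The cases above drive the whole pipeline through mocks; the intended call order of the public API is easier to see in one piece. A minimal wiring sketch under this patch's API (the WithBatch/WithReaderConcurrency values are illustrative, and pool, src, rdr, and imp are assumed to be constructed elsewhere):

```go
package example

import (
	"github.com/vesoft-inc/nebula-importer/v4/pkg/client"
	"github.com/vesoft-inc/nebula-importer/v4/pkg/importer"
	"github.com/vesoft-inc/nebula-importer/v4/pkg/manager"
	"github.com/vesoft-inc/nebula-importer/v4/pkg/reader"
	"github.com/vesoft-inc/nebula-importer/v4/pkg/source"
)

// run registers one source before Start, as the tests above do, then blocks
// in Wait until every reader and importer has drained.
func run(pool client.Pool, src source.Source, rdr reader.BatchRecordReader, imp importer.Importer) error {
	m := manager.New(
		pool,
		manager.WithBatch(128),           // hypothetical batch size
		manager.WithReaderConcurrency(8), // hypothetical concurrency
	)
	if err := m.Import(src, rdr, imp); err != nil {
		return err
	}
	if err := m.Start(); err != nil { // runs before-hooks, then opens the client pool
		return err
	}
	return m.Wait() // drains all work, logs final stats, runs after-hooks
}
```
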
+ + It("source open failed", func() { + mockSource.EXPECT().Name().Return("source name") + mockSource.EXPECT().Open().Return(os.ErrNotExist) + + err := m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, os.ErrNotExist)).To(BeTrue()) + }) + + It("get client failed", func() { + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(nil, stderrors.New("test error")) + + err := m.Start() + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("test error")) + }) + + It("exec before failed", func() { + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(nil, stderrors.New("test error")), + ) + + err := m.Start() + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("test error")) + }) + + It("client pool open failed", func() { + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + + mockClientPool.EXPECT().Open().Return(stderrors.New("test error")), + ) + + err := m.Start() + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("test error")) + }) + + It("exec after failed", func() { + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + + mockClientPool.EXPECT().Open().Return(nil), + + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("after statement").Times(1).Return(nil, stderrors.New("test error")), + ) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("test error")) + }) + + It("stop successfully", func() { + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + + mockClientPool.EXPECT().Open().Return(nil), + + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("after statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + ) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + time.Sleep(100 * time.Millisecond) + + err = m.Stop() + Expect(err).NotTo(HaveOccurred()) + }) + + It("stop failed", func() { + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + + mockClientPool.EXPECT().Open().Return(nil), + + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("after statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(false), + mockResponse.EXPECT().GetError().Times(1).Return(stderrors.New("exec failed")), + ) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + time.Sleep(100 * time.Millisecond) + + err = m.Stop() + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("exec failed")) + }) + + 
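
[Editor's note] The hook failures exercised in these cases all flow through execHooks, which borrows one client from the pool, runs each Hook's statements in order, aborts on the first failure, and sleeps for Wait after a hook's statements succeed. A hypothetical configuration sketch (the nGQL strings are placeholders, not part of this patch):

```go
package example

import (
	"time"

	"github.com/vesoft-inc/nebula-importer/v4/pkg/client"
	"github.com/vesoft-inc/nebula-importer/v4/pkg/manager"
)

// newManagerWithHooks shows the hook surface added in this patch.
func newManagerWithHooks(pool client.Pool) manager.Manager {
	return manager.NewWithOpts(
		manager.WithClientPool(pool),
		manager.WithBeforeHooks(&manager.Hook{
			// placeholder statement; execHooks returns on the first failure
			Statements: []string{"CREATE SPACE IF NOT EXISTS example (vid_type = INT64)"},
			Wait:       10 * time.Second, // pause after this hook's statements succeed
		}),
		manager.WithAfterHooks(&manager.Hook{
			Statements: []string{"SUBMIT JOB STATS"}, // placeholder statement
		}),
	)
}
```
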
It("stop without read finished", func() { + gomock.InOrder( + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("before statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + + mockClientPool.EXPECT().Open().Return(nil), + + mockClientPool.EXPECT().GetClient(gomock.Any()).Return(mockClient, nil), + mockClient.EXPECT().Execute("after statement").Times(1).Return(mockResponse, nil), + mockResponse.EXPECT().IsSucceed().Return(true), + ) + + mockSource.EXPECT().Name().Times(2).Return("source name") + mockSource.EXPECT().Open().Return(nil) + mockSource.EXPECT().Size().Return(int64(1024*1024*1024*1024), nil) + mockSource.EXPECT().Close().Return(nil) + + mockBatchRecordReader.EXPECT().ReadBatch().AnyTimes().Return(11, spec.Records{ + []string{"0123"}, + []string{"4567"}, + []string{"890"}, + }, nil) + + mockImporter.EXPECT().Wait().Times(1) + mockImporter.EXPECT().Import(gomock.Any()).AnyTimes().Return(&importer.ImportResp{}, nil) + mockImporter.EXPECT().Done().Times(1) + + err := m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + + err = m.Start() + Expect(err).NotTo(HaveOccurred()) + + time.Sleep(100 * time.Millisecond) + + err = m.Stop() + Expect(err).NotTo(HaveOccurred()) + }) + + It("no hooks", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + + mockClientPool.EXPECT().Open().Return(nil) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("nil or empty hooks", func() { + m.(*defaultManager).hooks.Before = []*Hook{ + nil, + {Statements: []string{""}}, + } + m.(*defaultManager).hooks.After = []*Hook{ + {Statements: []string{""}}, + nil, + } + + mockClientPool.EXPECT().Open().Return(nil) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("disable stats interval", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + m.(*defaultManager).statsInterval = 0 + + mockClientPool.EXPECT().Open().Return(nil) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("stats interval print", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + m.(*defaultManager).statsInterval = 10 * time.Microsecond + + mockClientPool.EXPECT().Open().Return(nil) + + err := m.Start() + Expect(err).NotTo(HaveOccurred()) + + time.Sleep(100 * time.Millisecond) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("submit reader failed", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + m.(*defaultManager).readerPool.Release() + + mockSource.EXPECT().Name().Times(2).Return("source name") + mockSource.EXPECT().Open().Times(2).Return(nil) + mockSource.EXPECT().Size().Times(2).Return(int64(1024), nil) + mockSource.EXPECT().Close().Times(2).Return(nil) + + mockClientPool.EXPECT().Open().Return(nil) + + mockImporter.EXPECT().Done().Times(2) + + err := m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + err = m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + + err = m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("submit importer 
failed", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + m.(*defaultManager).importerPool.Release() + + mockSource.EXPECT().Name().Times(2 + 2).Return("source name") + mockSource.EXPECT().Open().Times(2).Return(nil) + mockSource.EXPECT().Size().Times(2).Return(int64(1024), nil) + mockSource.EXPECT().Close().Times(2).Return(nil) + + gomock.InOrder( + mockBatchRecordReader.EXPECT().ReadBatch().Times(2).Return(11, spec.Records{ + []string{"0123"}, + []string{"4567"}, + []string{"890"}, + }, nil), + mockBatchRecordReader.EXPECT().ReadBatch().Times(2).Return(0, spec.Records(nil), io.EOF), + ) + + mockClientPool.EXPECT().Open().Return(nil) + + mockImporter.EXPECT().Wait().Times(2) + mockImporter.EXPECT().Done().Times(2) + + err := m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + err = m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + + err = m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + + It("get size failed", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + + mockSource.EXPECT().Name().Times(2).Return("source name") + mockSource.EXPECT().Open().Times(2).Return(nil) + mockSource.EXPECT().Size().Times(2).Return(int64(0), stderrors.New("test error")) + mockSource.EXPECT().Close().Times(2).Return(nil) + + err := m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("test error")) + + err = m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("test error")) + }) + + It("read failed", func() { + m.(*defaultManager).hooks.Before = nil + m.(*defaultManager).hooks.After = nil + + mockClientPool.EXPECT().Open().Return(nil) + mockSource.EXPECT().Name().Times(2 + 2).Return("source name") + mockSource.EXPECT().Open().Times(2).Return(nil) + mockSource.EXPECT().Size().Times(2).Return(int64(1024), nil) + mockSource.EXPECT().Close().Times(2).Return(nil) + + mockBatchRecordReader.EXPECT().ReadBatch().Times(2).Return(0, spec.Records(nil), stderrors.New("test error")) + + mockImporter.EXPECT().Wait().Times(2) + mockImporter.EXPECT().Done().Times(2) + + err := m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + err = m.Import( + mockSource, + mockBatchRecordReader, + mockImporter, + ) + Expect(err).NotTo(HaveOccurred()) + + err = m.Start() + Expect(err).NotTo(HaveOccurred()) + + err = m.Wait() + Expect(err).NotTo(HaveOccurred()) + }) + }) +}) diff --git a/pkg/picker/config.go b/pkg/picker/config.go index 05262809..2deed1a2 100644 --- a/pkg/picker/config.go +++ b/pkg/picker/config.go @@ -1,21 +1,23 @@ package picker import ( - "fmt" "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" ) // Config is the configuration to build Picker // The priority is as follows: -// ConcatItems > Indices -// Nullable -// DefaultValue -// NullValue, if set to null, subsequent conversions will be skipped. -// Type -// Function -// CheckOnPost +// +// ConcatItems > Indices +// Nullable +// DefaultValue +// NullValue, if set to null, subsequent conversions will be skipped. +// Type +// Function +// CheckOnPost type Config struct { - ConcatItems ConcatItems // Concat index column, constant, or mixed. 
+ ConcatItems []any // Concat index column, constant, or mixed. int for index column, string for constant. Indices []int // Set index columns, the first non-null. Nullable func(string) bool // Determine whether it is null. Optional. NullValue string // Set null value when it is null. Optional. @@ -25,13 +27,23 @@ type Config struct { CheckOnPost func(*Value) error // Set the value check function on post. } +//revive:disable-next-line:cyclomatic func (c *Config) Build() (Picker, error) { + for i := range c.Indices { + if c.Indices[i] < 0 { + return nil, errors.ErrInvalidIndex + } + } var retPicker Picker var nullHandled bool switch { - case c.ConcatItems.Len() > 0: + case len(c.ConcatItems) > 0: + concatItems := ConcatItems{} + if err := concatItems.Add(c.ConcatItems...); err != nil { + return nil, err + } retPicker = ConcatPicker{ - items: c.ConcatItems, + items: concatItems, } case len(c.Indices) == 1: retPicker = IndexPicker(c.Indices[0]) @@ -55,7 +67,7 @@ func (c *Config) Build() (Picker, error) { } nullHandled = true default: - return nil, fmt.Errorf("no indices or concat items") + return nil, errors.ErrNoIndicesOrConcatItems } var converters []Converter @@ -77,8 +89,11 @@ func (c *Config) Build() (Picker, error) { }) } } - - converters = append(converters, NewTypeConverter(c.Type)) + typeConverter, err := NewTypeConverter(c.Type) + if err != nil { + return nil, err + } + converters = append(converters, typeConverter) if c.Function != nil && *c.Function != "" { var functionConverter Converter = FunctionConverter{ @@ -95,6 +110,7 @@ func (c *Config) Build() (Picker, error) { if c.CheckOnPost != nil { converters = append(converters, ConverterFunc(func(v *Value) (*Value, error) { if err := c.CheckOnPost(v); err != nil { + v.Release() return nil, err } return v, nil diff --git a/pkg/picker/config_test.go b/pkg/picker/config_test.go index 2a724862..826dce92 100644 --- a/pkg/picker/config_test.go +++ b/pkg/picker/config_test.go @@ -1,918 +1,877 @@ package picker import ( + stderrors "errors" "fmt" - "testing" - "github.com/stretchr/testify/assert" -) - -func TestConfigBuildFailed(t *testing.T) { - var c Config - p, err := c.Build() - assert.Error(t, err) - assert.Nil(t, p) -} - -func TestConverters(t *testing.T) { - var converter Converter = Converters(nil) - v, err := converter.Convert(&Value{ - Val: "v", - }) - assert.NoError(t, err) - assert.Equal(t, &Value{ - Val: "v", - }, v) - - converter = Converters{NonConverter{}, ErrorConverter{Err: fmt.Errorf("test error")}} - v, err = converter.Convert(&Value{}) - assert.Error(t, err) - assert.Equal(t, "test error", err.Error()) - assert.Nil(t, v) + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" - converter = NullableConverters(nil) - v, err = converter.Convert(&Value{ - Val: "v", - }) - assert.NoError(t, err) - assert.Equal(t, &Value{ - Val: "v", - }, v) + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) - v, err = converter.Convert(&Value{ - Val: "v", - IsNull: true, - isSetNull: true, +var _ = Describe("Config", func() { + It("build failed", func() { + var c Config + p, err := c.Build() + Expect(err).To(HaveOccurred()) + Expect(p).To(BeNil()) }) - assert.NoError(t, err) - assert.Equal(t, &Value{ - Val: "v", - IsNull: true, - isSetNull: true, - }, v) -} -func TestConfig(t *testing.T) { - var ( - strEmpty = "" - strStr1 = "str1" - strInt1 = "1" - strFunHash = "hash" - ) - type recordCase struct { - record []string - wantValue *Value - wantErrString string - } - testcases := []struct { - name string - c Config - fn func(*Config) - cases []recordCase - }{ - { - name: "index BOOL", - c: Config{ - Indices: []int{1}, - Type: "BOOL", - }, - cases: []recordCase{ - { - record: nil, - wantErrString: "prop index 1 out range 0 of record", - }, - { - record: []string{}, - wantErrString: "prop index 1 out range 0 of record", - }, - { - record: []string{"0"}, - wantErrString: "prop index 1 out range 1 of record", - }, - { - record: []string{"0", "1"}, - wantValue: &Value{Val: "1", IsNull: false}, - }, - { - record: []string{"0", "1", "2"}, - wantValue: &Value{Val: "1", IsNull: false}, - }, - }, - }, - { - name: "index iNt", - c: Config{ - Indices: []int{1}, - Type: "iNt", - }, - cases: []recordCase{ - { - record: nil, - wantErrString: "prop index 1 out range 0 of record", - }, - { - record: []string{}, - wantErrString: "prop index 1 out range 0 of record", - }, - { - record: []string{"0"}, - wantErrString: "prop index 1 out range 1 of record", - }, - { - record: []string{"0", "1"}, - wantValue: &Value{Val: "1", IsNull: false}, - }, - { - record: []string{"0", "1", "2"}, - wantValue: &Value{Val: "1", IsNull: false}, - }, - }, - }, - { - name: "index Float", - c: Config{ - Indices: []int{2}, - Type: "Float", - }, - cases: []recordCase{ - { - record: []string{"0", "1.1", "2.2"}, - wantValue: &Value{Val: "2.2", IsNull: false}, - }, - }, - }, - { - name: "index double", - c: Config{ - Indices: []int{3}, - Type: "double", - }, - cases: []recordCase{ - { - record: []string{"0", "1.1", "2.2", "3.3"}, - wantValue: &Value{Val: "3.3", IsNull: false}, - }, - }, - }, - { - name: "index string", - c: Config{ - Indices: []int{1}, - Type: "string", - }, - cases: []recordCase{ - { - record: []string{"0", "str1", "str2"}, - wantValue: &Value{Val: "\"str1\"", IsNull: false}, - }, - }, - }, - { - name: "index date", - c: Config{ - Indices: []int{0}, - Type: "date", - }, - cases: []recordCase{ - { - record: []string{"2020-01-02"}, - wantValue: &Value{Val: "DATE(\"2020-01-02\")", IsNull: false}, - }, - }, - }, - { - name: "index time", - c: Config{ - Indices: []int{0}, - Type: "time", - }, - cases: []recordCase{ - { - record: []string{"18:38:23.284"}, - wantValue: &Value{Val: "TIME(\"18:38:23.284\")", IsNull: false}, - }, - }, - }, - { - name: "index datetime", - c: Config{ - Indices: []int{0}, - Type: "datetime", - }, - cases: []recordCase{ - { - record: []string{"2020-01-11T19:28:23.284"}, - wantValue: &Value{Val: "DATETIME(\"2020-01-11T19:28:23.284\")", IsNull: false}, - }, - }, - }, - { - name: "index timestamp", - c: Config{ - Indices: []int{0}, - Type: "timestamp", - }, - cases: []recordCase{ - { - record: []string{"2020-01-11T19:28:23"}, - wantValue: &Value{Val: "TIMESTAMP(\"2020-01-11T19:28:23\")", IsNull: false}, - }, - { - record: []string{"1578770903"}, - wantValue: &Value{Val: "TIMESTAMP(1578770903)", IsNull: false}, - }, - { - record: []string{""}, - wantValue: &Value{Val: 
"TIMESTAMP(\"\")", IsNull: false}, - }, - { - record: []string{"0"}, - wantValue: &Value{Val: "TIMESTAMP(0)", IsNull: false}, - }, - { - record: []string{"12"}, - wantValue: &Value{Val: "TIMESTAMP(12)", IsNull: false}, - }, - { - record: []string{"0x"}, - wantValue: &Value{Val: "TIMESTAMP(\"0x\")", IsNull: false}, - }, - { - record: []string{"0X"}, - wantValue: &Value{Val: "TIMESTAMP(\"0X\")", IsNull: false}, - }, - { - record: []string{"0123456789"}, - wantValue: &Value{Val: "TIMESTAMP(0123456789)", IsNull: false}, - }, - { - record: []string{"9876543210"}, - wantValue: &Value{Val: "TIMESTAMP(9876543210)", IsNull: false}, - }, - { - record: []string{"0x0123456789abcdef"}, - wantValue: &Value{Val: "TIMESTAMP(0x0123456789abcdef)", IsNull: false}, - }, - { - record: []string{"0X0123456789ABCDEF"}, - wantValue: &Value{Val: "TIMESTAMP(0X0123456789ABCDEF)", IsNull: false}, - }, - }, - }, - { - name: "index geography", - c: Config{ - Indices: []int{0}, - Type: "geography", - }, - cases: []recordCase{ - { - record: []string{"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))"}, - wantValue: &Value{Val: "ST_GeogFromText(\"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))\")", IsNull: false}, - }, - }, - }, - { - name: "index geography(point)", - c: Config{ - Indices: []int{0}, - Type: "geography(point)", - }, - cases: []recordCase{ - { - record: []string{"Point(0.0 0.0)"}, - wantValue: &Value{Val: "ST_GeogFromText(\"Point(0.0 0.0)\")", IsNull: false}, - }, - }, - }, - { - name: "index geography(linestring)", - c: Config{ - Indices: []int{0}, - Type: "geography(linestring)", - }, - cases: []recordCase{ - { - record: []string{"linestring(0 1, 179.99 89.99)"}, - wantValue: &Value{Val: "ST_GeogFromText(\"linestring(0 1, 179.99 89.99)\")", IsNull: false}, - }, - }, - }, - { - name: "index geography(polygon)", - c: Config{ - Indices: []int{0}, - Type: "geography(polygon)", - }, - cases: []recordCase{ - { - record: []string{"polygon((0 1, 2 4, 3 5, 4 9, 0 1))"}, - wantValue: &Value{Val: "ST_GeogFromText(\"polygon((0 1, 2 4, 3 5, 4 9, 0 1))\")", IsNull: false}, - }, - }, - }, - { - name: "index unsupported type", - c: Config{ - Indices: []int{0}, - Type: "unsupported", - }, - cases: []recordCase{ - { - record: []string{""}, - wantErrString: "unsupported type", - }, - }, - }, - { - name: "index Nullable", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "", IsNull: true}, - }, - }, - }, - { - name: "index Nullable value", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "", - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "", IsNull: true}, - }, - }, - }, - { - name: "index Nullable value changed", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "__NULL__" - }, - NullValue: "NULL", - }, - cases: []recordCase{ - { - record: []string{"str0", "__NULL__", "str2", "str3"}, - wantValue: &Value{Val: "NULL", IsNull: true}, - }, - }, - }, - { - name: "index not Nullable", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: nil, - NullValue: "NULL", - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "index not Nullable defaultValue", - c: Config{ - 
Indices: []int{1}, - Type: "string", - Nullable: nil, - NullValue: "NULL", - DefaultValue: &strStr1, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "index defaultValue string", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strStr1, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "\"str1\"", IsNull: false}, - }, - }, - }, - { - name: "index defaultValue string empty", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "_NULL_" - }, - NullValue: "NULL", - DefaultValue: &strEmpty, - }, - cases: []recordCase{ - { - record: []string{"str0", "_NULL_", "str2", "str3"}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "index defaultValue int", - c: Config{ - Indices: []int{1}, - Type: "int", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strInt1, - }, - cases: []recordCase{ - { - record: []string{"0", "", "2", "3"}, - wantValue: &Value{Val: "1", IsNull: false}, - }, - }, - }, - { - name: "index Function string", - c: Config{ - Indices: []int{1}, - Type: "string", - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"str0", "str1"}, - wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, - }, - }, - }, - { - name: "index Function int", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "int", - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"0", "1"}, - wantValue: &Value{Val: "hash(\"1\")", IsNull: false}, - }, - }, - }, - { - name: "index Function Nullable", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "NULL", IsNull: true}, - }, - }, - }, - { - name: "index Function defaultValue", - c: Config{ - Indices: []int{1}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strStr1, - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, - }, - }, - }, - { - name: "indices", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - }, - cases: []recordCase{ - { - record: []string{"str0", "", "str2", "str3"}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "indices unsupported type", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "unsupported", - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantErrString: "unsupported type", - }, - }, - }, - { - name: "indices Nullable unsupported type", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "unsupported", - Nullable: func(s string) bool { - return s == "" - }, - DefaultValue: &strEmpty, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantErrString: "unsupported type", - }, - }, - }, - { - name: "indices Nullable", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - }, - cases: []recordCase{ - { - record: []string{"str0", "", ""}, - wantErrString: "prop index 3 out range 3 of record", - }, - { - record: 
[]string{"str0", "", "", "str3"}, - wantValue: &Value{Val: "\"str3\"", IsNull: false}, - }, - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "", IsNull: true}, - }, - }, - }, - { - name: "indices Nullable value", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "", - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "", IsNull: true}, - }, - }, - }, - { - name: "indices Nullable value changed", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "__NULL__" - }, - NullValue: "NULL", - }, - cases: []recordCase{ - { - record: []string{"str0", "__NULL__", "__NULL__", "__NULL__"}, - wantValue: &Value{Val: "NULL", IsNull: true}, - }, - }, - }, - { - name: "indices not Nullable", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: nil, - NullValue: "NULL", - }, - cases: []recordCase{ - { - record: []string{""}, - wantErrString: "prop index 1 out range 1 of record", - }, - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "indices not Nullable defaultValue", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: nil, - NullValue: "NULL", - DefaultValue: &strStr1, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "indices defaultValue string", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strStr1, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "\"str1\"", IsNull: false}, - }, - }, - }, - { - name: "indices defaultValue string empty", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "_NULL_" - }, - NullValue: "NULL", - DefaultValue: &strEmpty, - }, - cases: []recordCase{ - { - record: []string{"str0", "_NULL_", "_NULL_", "_NULL_"}, - wantValue: &Value{Val: "\"\"", IsNull: false}, - }, - }, - }, - { - name: "indices defaultValue int", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "int", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strInt1, - }, - cases: []recordCase{ - { - record: []string{"0", "", "", ""}, - wantValue: &Value{Val: "1", IsNull: false}, - }, - }, - }, - { - name: "indices Function string", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"str0", "str1"}, - wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, - }, - }, - }, - { - name: "indices Function int", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "int", - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"0", "1"}, - wantValue: &Value{Val: "hash(\"1\")", IsNull: false}, - }, - }, - }, - { - name: "indices Function Nullable", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "NULL", IsNull: true}, - }, - }, - }, - { - name: "indices Function defaultValue", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - 
}, - NullValue: "NULL", - DefaultValue: &strStr1, - Function: &strFunHash, - }, - cases: []recordCase{ - { - record: []string{"str0", "", "", ""}, - wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, - }, - }, - }, - { - name: "concat items", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strStr1, - }, - fn: func(c *Config) { - c.ConcatItems. - AddConstant("c1"). - AddIndex(4). - AddIndex(5). - AddConstant("c2"). - AddIndex(6). - AddConstant("c3") - }, - cases: []recordCase{ - { - record: []string{"str0", "str1", "str2", "str3", "str4", "str5"}, - wantErrString: "prop index 6 out range 6 of record", - }, - { - record: []string{"str0", "str1", "str2", "str3", "str4", "str5", "str6"}, - wantValue: &Value{Val: "\"c1str4str5c2str6c3\"", IsNull: false}, - }, - { - record: []string{"", "", "", "", "", "", ""}, - wantValue: &Value{Val: "\"c1c2c3\"", IsNull: false}, - }, - { - record: []string{"", "", "", "", "str4", "", ""}, - wantValue: &Value{Val: "\"c1str4c2c3\"", IsNull: false}, - }, - }, - }, - { - name: "concat items Function", - c: Config{ - Indices: []int{1, 2, 3}, - Type: "string", - Nullable: func(s string) bool { - return s == "" - }, - NullValue: "NULL", - DefaultValue: &strStr1, - Function: &strFunHash, - }, - fn: func(c *Config) { - c.ConcatItems. - AddConstant("c1"). - AddIndex(4). - AddIndex(5). - AddConstant("c2"). - AddIndex(6). - AddConstant("c3") - }, - cases: []recordCase{ - { - record: []string{"str0", "str1", "str2", "str3", "str4", "str5"}, - wantErrString: "prop index 6 out range 6 of record", - }, - { - record: []string{"str0", "str1", "str2", "str3", "str4", "str5", "str6"}, - wantValue: &Value{Val: "hash(\"c1str4str5c2str6c3\")", IsNull: false}, - }, - { - record: []string{"", "", "", "", "", "", ""}, - wantValue: &Value{Val: "hash(\"c1c2c3\")", IsNull: false}, - }, - { - record: []string{"", "", "", "", "str4", "", ""}, - wantValue: &Value{Val: "hash(\"c1str4c2c3\")", IsNull: false}, - }, - }, - }, - { - name: "check", - c: Config{ - Indices: []int{1}, - Type: "string", - CheckOnPost: func(value *Value) error { - return nil - }, - }, - cases: []recordCase{ - { - record: []string{"0", "str1", "str2"}, - wantValue: &Value{Val: "\"str1\"", IsNull: false}, - }, - }, - }, - { - name: "check failed", - c: Config{ - Indices: []int{1}, - Type: "string", - CheckOnPost: func(value *Value) error { - return fmt.Errorf("check failed") - }, - }, - cases: []recordCase{ - { - record: []string{"0", "str1", "str2"}, - wantErrString: "check failed", - }, - }, - }, - } + Describe("Config cases", func() { + var ( + strEmpty = "" + strStr1 = "str1" + strInt1 = "1" + strFunHash = "hash" + ) + type recordCase struct { + record []string + wantValue *Value + wantErr error + wantErrString string + } + testcases := []struct { + name string + c Config + buildErr error + cases []recordCase + }{ + { + name: "index BOOL", + c: Config{ + Indices: []int{1}, + Type: "BOOL", + }, + cases: []recordCase{ + { + record: nil, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"0"}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"0", "1"}, + wantValue: &Value{Val: "1", IsNull: false}, + }, + { + record: []string{"0", "1", "2"}, + wantValue: &Value{Val: "1", IsNull: false}, + }, + }, + }, + { + name: "index iNt", + c: Config{ + Indices: []int{1}, + Type: "iNt", + }, + cases: []recordCase{ + { + record: nil, + wantErr: 
errors.ErrNoRecord, + }, + { + record: []string{}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"0"}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"0", "1"}, + wantValue: &Value{Val: "1", IsNull: false}, + }, + { + record: []string{"0", "1", "2"}, + wantValue: &Value{Val: "1", IsNull: false}, + }, + }, + }, + { + name: "index Float", + c: Config{ + Indices: []int{2}, + Type: "Float", + }, + cases: []recordCase{ + { + record: []string{"0", "1.1", "2.2"}, + wantValue: &Value{Val: "2.2", IsNull: false}, + }, + }, + }, + { + name: "index double", + c: Config{ + Indices: []int{3}, + Type: "double", + }, + cases: []recordCase{ + { + record: []string{"0", "1.1", "2.2", "3.3"}, + wantValue: &Value{Val: "3.3", IsNull: false}, + }, + }, + }, + { + name: "index string", + c: Config{ + Indices: []int{1}, + Type: "string", + }, + cases: []recordCase{ + { + record: []string{"0", "str1", "str2"}, + wantValue: &Value{Val: "\"str1\"", IsNull: false}, + }, + }, + }, + { + name: "index date", + c: Config{ + Indices: []int{0}, + Type: "date", + }, + cases: []recordCase{ + { + record: []string{"2020-01-02"}, + wantValue: &Value{Val: "DATE(\"2020-01-02\")", IsNull: false}, + }, + }, + }, + { + name: "index time", + c: Config{ + Indices: []int{0}, + Type: "time", + }, + cases: []recordCase{ + { + record: []string{"18:38:23.284"}, + wantValue: &Value{Val: "TIME(\"18:38:23.284\")", IsNull: false}, + }, + }, + }, + { + name: "index datetime", + c: Config{ + Indices: []int{0}, + Type: "datetime", + }, + cases: []recordCase{ + { + record: []string{"2020-01-11T19:28:23.284"}, + wantValue: &Value{Val: "DATETIME(\"2020-01-11T19:28:23.284\")", IsNull: false}, + }, + }, + }, + { + name: "index timestamp", + c: Config{ + Indices: []int{0}, + Type: "timestamp", + }, + cases: []recordCase{ + { + record: []string{"2020-01-11T19:28:23"}, + wantValue: &Value{Val: "TIMESTAMP(\"2020-01-11T19:28:23\")", IsNull: false}, + }, + { + record: []string{"1578770903"}, + wantValue: &Value{Val: "TIMESTAMP(1578770903)", IsNull: false}, + }, + { + record: []string{""}, + wantValue: &Value{Val: "TIMESTAMP(\"\")", IsNull: false}, + }, + { + record: []string{"0"}, + wantValue: &Value{Val: "TIMESTAMP(0)", IsNull: false}, + }, + { + record: []string{"12"}, + wantValue: &Value{Val: "TIMESTAMP(12)", IsNull: false}, + }, + { + record: []string{"0x"}, + wantValue: &Value{Val: "TIMESTAMP(\"0x\")", IsNull: false}, + }, + { + record: []string{"0X"}, + wantValue: &Value{Val: "TIMESTAMP(\"0X\")", IsNull: false}, + }, + { + record: []string{"0123456789"}, + wantValue: &Value{Val: "TIMESTAMP(0123456789)", IsNull: false}, + }, + { + record: []string{"9876543210"}, + wantValue: &Value{Val: "TIMESTAMP(9876543210)", IsNull: false}, + }, + { + record: []string{"0x0123456789abcdef"}, + wantValue: &Value{Val: "TIMESTAMP(0x0123456789abcdef)", IsNull: false}, + }, + { + record: []string{"0X0123456789ABCDEF"}, + wantValue: &Value{Val: "TIMESTAMP(0X0123456789ABCDEF)", IsNull: false}, + }, + }, + }, + { + name: "index geography", + c: Config{ + Indices: []int{0}, + Type: "geography", + }, + cases: []recordCase{ + { + record: []string{"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))"}, + wantValue: &Value{Val: "ST_GeogFromText(\"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))\")", IsNull: false}, + }, + }, + }, + { + name: "index geography(point)", + c: Config{ + Indices: []int{0}, + Type: "geography(point)", + }, + cases: []recordCase{ + { + record: []string{"Point(0.0 0.0)"}, + wantValue: &Value{Val: 
"ST_GeogFromText(\"Point(0.0 0.0)\")", IsNull: false}, + }, + }, + }, + { + name: "index geography(linestring)", + c: Config{ + Indices: []int{0}, + Type: "geography(linestring)", + }, + cases: []recordCase{ + { + record: []string{"linestring(0 1, 179.99 89.99)"}, + wantValue: &Value{Val: "ST_GeogFromText(\"linestring(0 1, 179.99 89.99)\")", IsNull: false}, + }, + }, + }, + { + name: "index geography(polygon)", + c: Config{ + Indices: []int{0}, + Type: "geography(polygon)", + }, + cases: []recordCase{ + { + record: []string{"polygon((0 1, 2 4, 3 5, 4 9, 0 1))"}, + wantValue: &Value{Val: "ST_GeogFromText(\"polygon((0 1, 2 4, 3 5, 4 9, 0 1))\")", IsNull: false}, + }, + }, + }, + { + name: "index unsupported type", + c: Config{ + Indices: []int{0}, + Type: "unsupported", + }, + buildErr: errors.ErrUnsupportedValueType, + }, + { + name: "index invalid", + c: Config{ + Indices: []int{-1}, + }, + buildErr: errors.ErrInvalidIndex, + }, + { + name: "concat items index invalid", + c: Config{ + ConcatItems: []any{"str", -1}, + }, + buildErr: errors.ErrInvalidIndex, + }, + { + name: "index Nullable", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "", IsNull: true}, + }, + }, + }, + { + name: "index Nullable value", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "", + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "", IsNull: true}, + }, + }, + }, + { + name: "index Nullable value changed", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "__NULL__" + }, + NullValue: "NULL", + }, + cases: []recordCase{ + { + record: []string{"str0", "__NULL__", "str2", "str3"}, + wantValue: &Value{Val: "NULL", IsNull: true}, + }, + }, + }, + { + name: "index not Nullable", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: nil, + NullValue: "NULL", + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "index not Nullable defaultValue", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: nil, + NullValue: "NULL", + DefaultValue: &strStr1, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "index defaultValue string", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strStr1, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "\"str1\"", IsNull: false}, + }, + }, + }, + { + name: "index defaultValue string empty", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "_NULL_" + }, + NullValue: "NULL", + DefaultValue: &strEmpty, + }, + cases: []recordCase{ + { + record: []string{"str0", "_NULL_", "str2", "str3"}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "index defaultValue int", + c: Config{ + Indices: []int{1}, + Type: "int", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strInt1, + }, + cases: []recordCase{ + { + record: []string{"0", "", "2", "3"}, + wantValue: &Value{Val: "1", 
IsNull: false}, + }, + }, + }, + { + name: "index Function string", + c: Config{ + Indices: []int{1}, + Type: "string", + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "str1"}, + wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, + }, + }, + }, + { + name: "index Function int", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "int", + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"0", "1"}, + wantValue: &Value{Val: "hash(\"1\")", IsNull: false}, + }, + }, + }, + { + name: "index Function Nullable", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "NULL", IsNull: true}, + }, + }, + }, + { + name: "index Function defaultValue", + c: Config{ + Indices: []int{1}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strStr1, + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, + }, + }, + }, + { + name: "indices", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + }, + cases: []recordCase{ + { + record: []string{"str0", "", "str2", "str3"}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "indices unsupported type", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "unsupported", + }, + buildErr: errors.ErrUnsupportedValueType, + }, + { + name: "indices Nullable unsupported type", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "unsupported", + Nullable: func(s string) bool { + return s == "" + }, + DefaultValue: &strEmpty, + }, + buildErr: errors.ErrUnsupportedValueType, + }, + { + name: "indices Nullable", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + }, + cases: []recordCase{ + { + record: []string{"str0", "", ""}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"str0", "", "", "str3"}, + wantValue: &Value{Val: "\"str3\"", IsNull: false}, + }, + { + record: []string{"str0", "", "", ""}, + wantValue: &Value{Val: "", IsNull: true}, + }, + }, + }, + { + name: "indices Nullable value", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "", + }, + cases: []recordCase{ + { + record: []string{"str0", "", "", ""}, + wantValue: &Value{Val: "", IsNull: true}, + }, + }, + }, + { + name: "indices Nullable value changed", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "__NULL__" + }, + NullValue: "NULL", + }, + cases: []recordCase{ + { + record: []string{"str0", "__NULL__", "__NULL__", "__NULL__"}, + wantValue: &Value{Val: "NULL", IsNull: true}, + }, + }, + }, + { + name: "indices not Nullable", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: nil, + NullValue: "NULL", + }, + cases: []recordCase{ + { + record: []string{""}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"str0", "", "", ""}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "indices not Nullable defaultValue", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: nil, + NullValue: "NULL", + DefaultValue: &strStr1, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "", 
""}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "indices defaultValue string", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strStr1, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "", ""}, + wantValue: &Value{Val: "\"str1\"", IsNull: false}, + }, + }, + }, + { + name: "indices defaultValue string empty", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "_NULL_" + }, + NullValue: "NULL", + DefaultValue: &strEmpty, + }, + cases: []recordCase{ + { + record: []string{"str0", "_NULL_", "_NULL_", "_NULL_"}, + wantValue: &Value{Val: "\"\"", IsNull: false}, + }, + }, + }, + { + name: "indices defaultValue int", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "int", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strInt1, + }, + cases: []recordCase{ + { + record: []string{"0", "", "", ""}, + wantValue: &Value{Val: "1", IsNull: false}, + }, + }, + }, + { + name: "indices Function string", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "str1"}, + wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, + }, + }, + }, + { + name: "indices Function int", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "int", + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"0", "1"}, + wantValue: &Value{Val: "hash(\"1\")", IsNull: false}, + }, + }, + }, + { + name: "indices Function Nullable", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "", ""}, + wantValue: &Value{Val: "NULL", IsNull: true}, + }, + }, + }, + { + name: "indices Function defaultValue", + c: Config{ + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strStr1, + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "", "", ""}, + wantValue: &Value{Val: "hash(\"str1\")", IsNull: false}, + }, + }, + }, + { + name: "concat items", + c: Config{ + ConcatItems: []any{"c1", 4, 5, "c2", 6, "c3"}, + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strStr1, + }, + cases: []recordCase{ + { + record: []string{"str0", "str1", "str2", "str3", "str4", "str5"}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"str0", "str1", "str2", "str3", "str4", "str5", "str6"}, + wantValue: &Value{Val: "\"c1str4str5c2str6c3\"", IsNull: false}, + }, + { + record: []string{"", "", "", "", "", "", ""}, + wantValue: &Value{Val: "\"c1c2c3\"", IsNull: false}, + }, + { + record: []string{"", "", "", "", "str4", "", ""}, + wantValue: &Value{Val: "\"c1str4c2c3\"", IsNull: false}, + }, + }, + }, + { + name: "concat items Function", + c: Config{ + ConcatItems: []any{"c1", 4, 5, "c2", 6, "c3"}, + Indices: []int{1, 2, 3}, + Type: "string", + Nullable: func(s string) bool { + return s == "" + }, + NullValue: "NULL", + DefaultValue: &strStr1, + Function: &strFunHash, + }, + cases: []recordCase{ + { + record: []string{"str0", "str1", "str2", "str3", "str4", "str5"}, + wantErr: errors.ErrNoRecord, + }, + { + record: []string{"str0", "str1", 
"str2", "str3", "str4", "str5", "str6"}, + wantValue: &Value{Val: "hash(\"c1str4str5c2str6c3\")", IsNull: false}, + }, + { + record: []string{"", "", "", "", "", "", ""}, + wantValue: &Value{Val: "hash(\"c1c2c3\")", IsNull: false}, + }, + { + record: []string{"", "", "", "", "str4", "", ""}, + wantValue: &Value{Val: "hash(\"c1str4c2c3\")", IsNull: false}, + }, + }, + }, + { + name: "check", + c: Config{ + Indices: []int{1}, + Type: "string", + CheckOnPost: func(value *Value) error { + return nil + }, + }, + cases: []recordCase{ + { + record: []string{"0", "str1", "str2"}, + wantValue: &Value{Val: "\"str1\"", IsNull: false}, + }, + }, + }, + { + name: "check failed", + c: Config{ + Indices: []int{1}, + Type: "string", + CheckOnPost: func(value *Value) error { + return fmt.Errorf("check failed") + }, + }, + cases: []recordCase{ + { + record: []string{"0", "str1", "str2"}, + wantErrString: "check failed", + }, + }, + }, + } - for _, tc := range testcases { - t.Run(tc.name, func(t *testing.T) { - ast := assert.New(t) - if tc.fn != nil { - tc.fn(&tc.c) - } - p, err := tc.c.Build() - ast.NoError(err) - for i, c := range tc.cases { - v, err := p.Pick(c.record) - if c.wantErrString == "" { - ast.NoError(err, "%d %v", i, c.record) - // isSetNull must equal to IsNull - c.wantValue.isSetNull = c.wantValue.IsNull - ast.Equal(c.wantValue, v, "%d %v", i, c.record) + for _, tc := range testcases { + tc := tc + It(tc.name, func() { + p, err := tc.c.Build() + if tc.buildErr != nil { + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, tc.buildErr)).To(BeTrue()) } else { - ast.Error(err, "%d %v", i, c.record) - ast.Contains(err.Error(), c.wantErrString, "%d %v", i, c.record) - ast.Nil(v, "%d %v", i, c.record) + Expect(err).NotTo(HaveOccurred()) } - } - }) - } -} + + for i, c := range tc.cases { + v, err := p.Pick(c.record) + if c.wantErr == nil && c.wantErrString == "" { + Expect(err).NotTo(HaveOccurred(), "%d %v", i, c.record) + // isSetNull must equal to IsNull + c.wantValue.isSetNull = c.wantValue.IsNull + Expect(c.wantValue).To(Equal(v), "%d %v", i, c.record) + } else { + Expect(err).To(HaveOccurred(), "%d %v", i, c.record) + if c.wantErr != nil { + Expect(stderrors.Is(err, c.wantErr)).To(BeTrue(), "%d %v", i, c.record) + } + if c.wantErrString != "" { + Expect(err.Error()).To(ContainSubstring(c.wantErrString), "%d %v", i, c.record) + } + Expect(v).To(BeNil()) + } + } + }) + } + }) +}) diff --git a/pkg/picker/converter-error.go b/pkg/picker/converter-error.go deleted file mode 100644 index a5c0f034..00000000 --- a/pkg/picker/converter-error.go +++ /dev/null @@ -1,11 +0,0 @@ -package picker - -var _ Converter = ErrorConverter{} - -type ErrorConverter struct { - Err error -} - -func (ec ErrorConverter) Convert(v *Value) (*Value, error) { - return nil, ec.Err -} diff --git a/pkg/picker/converter-default.go b/pkg/picker/converter_default.go similarity index 100% rename from pkg/picker/converter-default.go rename to pkg/picker/converter_default.go diff --git a/pkg/picker/converter_default_test.go b/pkg/picker/converter_default_test.go new file mode 100644 index 00000000..be66ddb5 --- /dev/null +++ b/pkg/picker/converter_default_test.go @@ -0,0 +1,32 @@ +package picker + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("DefaultConverter", func() { + It("normal DefaultConverter", func() { + var converter Converter = DefaultConverter{ + Value: "default", + } + + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + + v, err = converter.Convert(&Value{ + Val: "v", + IsNull: true, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "default", + IsNull: false, + })) + }) +}) diff --git a/pkg/picker/converter-function.go b/pkg/picker/converter_function.go similarity index 86% rename from pkg/picker/converter-function.go rename to pkg/picker/converter_function.go index 33c83d47..f57af723 100644 --- a/pkg/picker/converter-function.go +++ b/pkg/picker/converter_function.go @@ -1,6 +1,8 @@ package picker -import "fmt" +import ( + "strconv" +) var ( _ Converter = FunctionConverter{} @@ -22,7 +24,7 @@ func (fc FunctionConverter) Convert(v *Value) (*Value, error) { } func (fc FunctionStringConverter) Convert(v *Value) (*Value, error) { - v.Val = getFuncValue(fc.Name, fmt.Sprintf("%q", v.Val)) + v.Val = getFuncValue(fc.Name, strconv.Quote(v.Val)) return v, nil } diff --git a/pkg/picker/converter_function_test.go b/pkg/picker/converter_function_test.go new file mode 100644 index 00000000..2f70830c --- /dev/null +++ b/pkg/picker/converter_function_test.go @@ -0,0 +1,38 @@ +package picker + +import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("FunctionConverter", func() { + It("normal FunctionConverter", func() { + var converter Converter = FunctionConverter{ + Name: "testFunc", + } + + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "testFunc(v)", + })) + }) +}) + +var _ = Describe("FunctionStringConverter", func() { + It("normal FunctionStringConverter", func() { + var converter Converter = FunctionStringConverter{ + Name: "testFunc", + } + + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "testFunc(\"v\")", + })) + }) +}) diff --git a/pkg/picker/converter-non.go b/pkg/picker/converter_non.go similarity index 100% rename from pkg/picker/converter-non.go rename to pkg/picker/converter_non.go diff --git a/pkg/picker/converter_non_test.go b/pkg/picker/converter_non_test.go new file mode 100644 index 00000000..a1fcc2d1 --- /dev/null +++ b/pkg/picker/converter_non_test.go @@ -0,0 +1,20 @@ +package picker + +import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("NonConverter", func() { + It("normal NonConverter", func() { + var converter Converter = NonConverter{} + + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + }) +}) diff --git a/pkg/picker/converter-null.go b/pkg/picker/converter_null.go similarity index 100% rename from pkg/picker/converter-null.go rename to pkg/picker/converter_null.go diff --git a/pkg/picker/converter_null_test.go b/pkg/picker/converter_null_test.go new file mode 100644 index 00000000..f094a9d2 --- /dev/null +++ b/pkg/picker/converter_null_test.go @@ -0,0 +1,60 @@ +package picker + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("NullConverter", func() { + It("normal NullConverter", func() { + var converter Converter = NullConverter{ + Value: "NULL", + } + + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + + v, err = converter.Convert(&Value{ + Val: "v", + IsNull: true, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "NULL", + IsNull: true, + isSetNull: true, + })) + }) +}) + +var _ = Describe("NullableConverter", func() { + It("normal NullableConverter", func() { + var converter Converter = NullableConverter{ + Nullable: func(s string) bool { + return s == "NULL" + }, + } + + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + + v, err = converter.Convert(&Value{ + Val: "NULL", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "NULL", + IsNull: true, + })) + }) +}) diff --git a/pkg/picker/converter_test.go b/pkg/picker/converter_test.go new file mode 100644 index 00000000..4a7c1662 --- /dev/null +++ b/pkg/picker/converter_test.go @@ -0,0 +1,212 @@ +package picker + +import ( + stderrors "errors" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("ConverterFunc", func() { + It("normal Converters", func() { + var converter Converter = ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test " + v.Val + return v, nil + }) + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test v", + })) + }) +}) + +var _ = Describe("Converters", func() { + It("nil Converters", func() { + var converter Converter = Converters(nil) + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + }) + + It("one Converters", func() { + converter := Converters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test " + v.Val + return v, nil + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test v", + })) + }) + + It("many Converters", func() { + converter := Converters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test1 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test2 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test3 " + v.Val + return v, nil + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test3 test2 test1 v", + })) + }) + + It("many Converters failed", func() { + converter := Converters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test1 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test2 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + return nil, stderrors.New("test failed") + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("test failed")) + Expect(v).To(BeNil()) + }) +}) + +var _ = Describe("NullableConverters", func() { + It("nil NullableConverters", func() { + converter := NullableConverters(nil) + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + 
Expect(v).To(Equal(&Value{ + Val: "v", + })) + + v, err = converter.Convert(&Value{ + Val: "v", + IsNull: true, + isSetNull: true, + }) + + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + IsNull: true, + isSetNull: true, + })) + }) + + It("one NullableConverters", func() { + converter := NullableConverters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test " + v.Val + return v, nil + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test v", + })) + }) + + It("many NullableConverters", func() { + converter := NullableConverters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test1 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test2 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test3 " + v.Val + return v, nil + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test3 test2 test1 v", + })) + }) + + It("many NullableConverters failed", func() { + converter := NullableConverters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test1 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test2 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + return nil, stderrors.New("test failed") + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("test failed")) + Expect(v).To(BeNil()) + }) + + It("many NullableConverters isSetNull", func() { + converter := NullableConverters{ + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test1 " + v.Val + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "" + v.isSetNull = true + return v, nil + }), + ConverterFunc(func(v *Value) (*Value, error) { + return nil, stderrors.New("test failed") + }), + } + v, err := converter.Convert(&Value{ + Val: "v", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "", + isSetNull: true, + })) + }) +}) diff --git a/pkg/picker/converter-type.go b/pkg/picker/converter_type.go similarity index 79% rename from pkg/picker/converter-type.go rename to pkg/picker/converter_type.go index 9b0a6bcb..df2b3b56 100644 --- a/pkg/picker/converter-type.go +++ b/pkg/picker/converter_type.go @@ -1,10 +1,11 @@ package picker import ( - "fmt" + "strconv" "strings" - "github.com/vesoft-inc/nebula-importer/v3/pkg/utils" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" ) var ( @@ -54,30 +55,30 @@ type ( TypeGeoPolygonConverter = FunctionStringConverter ) -func NewTypeConverter(t string) Converter { +func NewTypeConverter(t string) (Converter, error) { switch strings.ToUpper(t) { case "BOOL": - return TypeBoolConverter{} + return TypeBoolConverter{}, nil case "INT": - return TypeIntConverter{} + return TypeIntConverter{}, nil case "FLOAT": - return TypeFloatConverter{} + return TypeFloatConverter{}, nil case "DOUBLE": - return TypeDoubleConverter{} + return TypeDoubleConverter{}, nil case "STRING": - return TypeStringConverter{} + return TypeStringConverter{}, nil case "DATE": return TypeDateConverter{ Name: "DATE", - } + }, nil case "TIME": return TypeTimeConverter{ Name: "TIME", - } + }, nil case "DATETIME": return TypeDatetimeConverter{ Name: "DATETIME", - } + }, 
nil case "TIMESTAMP": return TypeTimestampConverter{ fc: FunctionConverter{ @@ -86,31 +87,29 @@ func NewTypeConverter(t string) Converter { fsc: FunctionStringConverter{ Name: "TIMESTAMP", }, - } + }, nil case "GEOGRAPHY": return TypeGeoConverter{ Name: "ST_GeogFromText", - } + }, nil case "GEOGRAPHY(POINT)": return TypeGeoPointConverter{ Name: "ST_GeogFromText", - } + }, nil case "GEOGRAPHY(LINESTRING)": return TypeGeoLineStringConverter{ Name: "ST_GeogFromText", - } + }, nil case "GEOGRAPHY(POLYGON)": return TypeGeoPolygonConverter{ Name: "ST_GeogFromText", - } - } - return ErrorConverter{ - Err: fmt.Errorf("unsupported type %s", t), + }, nil } + return nil, errors.ErrUnsupportedValueType } -func (tc TypeStringConverter) Convert(v *Value) (*Value, error) { - v.Val = fmt.Sprintf("%q", v.Val) +func (TypeStringConverter) Convert(v *Value) (*Value, error) { + v.Val = strconv.Quote(v.Val) return v, nil } diff --git a/pkg/picker/converter_type_test.go b/pkg/picker/converter_type_test.go new file mode 100644 index 00000000..fbb20313 --- /dev/null +++ b/pkg/picker/converter_type_test.go @@ -0,0 +1,192 @@ +package picker + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("TypeConverter", func() { + It("BOOL", func() { + converter, _ := NewTypeConverter("BOOL") + + v, err := converter.Convert(&Value{ + Val: "true", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "true", + })) + + v, err = converter.Convert(&Value{ + Val: "false", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "false", + })) + }) + + It("INT", func() { + converter, _ := NewTypeConverter("int") + + v, err := converter.Convert(&Value{ + Val: "0", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "0", + })) + }) + + It("FLOAT", func() { + converter, _ := NewTypeConverter("FLOAT") + + v, err := converter.Convert(&Value{ + Val: "1.2", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "1.2", + })) + }) + + It("DOUBLE", func() { + converter, _ := NewTypeConverter("DOUBLE") + + v, err := converter.Convert(&Value{ + Val: "1.2", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "1.2", + })) + }) + + It("STRING", func() { + converter, _ := NewTypeConverter("STRING") + + v, err := converter.Convert(&Value{ + Val: "str", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "\"str\"", + })) + }) + + It("DATE", func() { + converter, _ := NewTypeConverter("DATE") + + v, err := converter.Convert(&Value{ + Val: "2020-01-02", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "DATE(\"2020-01-02\")", + })) + }) + + It("TIME", func() { + converter, _ := NewTypeConverter("TIME") + + v, err := converter.Convert(&Value{ + Val: "18:38:23.284", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "TIME(\"18:38:23.284\")", + })) + }) + + It("DATETIME", func() { + converter, _ := NewTypeConverter("DATETIME") + + v, err := converter.Convert(&Value{ + Val: "2020-01-11T19:28:23.284", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "DATETIME(\"2020-01-11T19:28:23.284\")", + })) + }) + + It("TIMESTAMP", func() { + converter, _ := NewTypeConverter("TIMESTAMP") + + v, err := converter.Convert(&Value{ + Val: "2020-01-11T19:28:23", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + 
Val: "TIMESTAMP(\"2020-01-11T19:28:23\")", + })) + + v, err = converter.Convert(&Value{ + Val: "1578770903", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "TIMESTAMP(1578770903)", + })) + }) + + It("GEOGRAPHY", func() { + converter, _ := NewTypeConverter("GEOGRAPHY") + + v, err := converter.Convert(&Value{ + Val: "Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "ST_GeogFromText(\"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))\")", + })) + }) + + It("GEOGRAPHY(POINT)", func() { + converter, _ := NewTypeConverter("GEOGRAPHY(POINT)") + + v, err := converter.Convert(&Value{ + Val: "Point(0.0 0.0)", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "ST_GeogFromText(\"Point(0.0 0.0)\")", + })) + }) + + It("GEOGRAPHY(LINESTRING)", func() { + converter, _ := NewTypeConverter("GEOGRAPHY(LINESTRING)") + + v, err := converter.Convert(&Value{ + Val: "linestring(0 1, 179.99 89.99)", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "ST_GeogFromText(\"linestring(0 1, 179.99 89.99)\")", + })) + }) + + It("GEOGRAPHY(POLYGON)", func() { + converter, _ := NewTypeConverter("GEOGRAPHY(POLYGON)") + + v, err := converter.Convert(&Value{ + Val: "polygon((0 1, 2 4, 3 5, 4 9, 0 1))", + }) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "ST_GeogFromText(\"polygon((0 1, 2 4, 3 5, 4 9, 0 1))\")", + })) + }) + + It("Unsupported", func() { + converter, err := NewTypeConverter("Unsupported") + + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedValueType)).To(BeTrue()) + Expect(converter).To(BeNil()) + }) +}) diff --git a/pkg/picker/picker-concat.go b/pkg/picker/picker-concat.go deleted file mode 100644 index f84fd5dc..00000000 --- a/pkg/picker/picker-concat.go +++ /dev/null @@ -1,47 +0,0 @@ -package picker - -import ( - "strings" -) - -var _ Picker = ConcatPicker{} - -type ( - ConcatItems struct { - pickers NullablePickers - } - - ConcatPicker struct { - items ConcatItems - } -) - -func (ci *ConcatItems) AddIndex(index int) *ConcatItems { - ci.pickers = append(ci.pickers, IndexPicker(index)) - return ci -} - -func (ci *ConcatItems) AddConstant(constant string) *ConcatItems { - ci.pickers = append(ci.pickers, ConstantPicker(constant)) - return ci -} - -func (ci ConcatItems) Len() int { - return len(ci.pickers) -} - -func (cp ConcatPicker) Pick(record []string) (*Value, error) { - var sb strings.Builder - for _, p := range cp.items.pickers { - v, err := p.Pick(record) - if err != nil { - return nil, err - } - sb.WriteString(v.Val) - } - - return &Value{ - Val: sb.String(), - IsNull: false, - }, nil -} diff --git a/pkg/picker/picker-constant.go b/pkg/picker/picker-constant.go deleted file mode 100644 index 51abb60d..00000000 --- a/pkg/picker/picker-constant.go +++ /dev/null @@ -1,14 +0,0 @@ -package picker - -var ( - _ Picker = ConstantPicker("") -) - -type ConstantPicker string - -func (cp ConstantPicker) Pick(_ []string) (v *Value, err error) { - return &Value{ - Val: string(cp), - IsNull: false, - }, nil -} diff --git a/pkg/picker/picker-index.go b/pkg/picker/picker-index.go deleted file mode 100644 index fe5c23b9..00000000 --- a/pkg/picker/picker-index.go +++ /dev/null @@ -1,22 +0,0 @@ -package picker - -import "fmt" - -var ( - _ Picker = IndexPicker(0) -) - -type ( - IndexPicker int -) - -func (ip IndexPicker) Pick(record []string) (*Value, error) { - index := int(ip) - if index < 0 || 
index >= len(record) { - return nil, fmt.Errorf("prop index %d out range %d of record(%v)", index, len(record), record) - } - return &Value{ - Val: record[index], - IsNull: false, - }, nil -} diff --git a/pkg/picker/picker.go b/pkg/picker/picker.go index 7329ecc0..c2f464ef 100644 --- a/pkg/picker/picker.go +++ b/pkg/picker/picker.go @@ -1,6 +1,7 @@ package picker var ( + _ Picker = PickerFunc(nil) _ Picker = ConverterPicker{} _ Picker = NullablePickers{} ) @@ -10,6 +11,8 @@ type ( Pick([]string) (*Value, error) } + PickerFunc func(record []string) (*Value, error) + ConverterPicker struct { picker Picker converter Converter @@ -18,11 +21,18 @@ type ( NullablePickers []Picker ) +func (f PickerFunc) Pick(record []string) (*Value, error) { + return f(record) +} + func (cp ConverterPicker) Pick(record []string) (*Value, error) { v, err := cp.picker.Pick(record) if err != nil { return nil, err } + if cp.converter == nil { + return v, nil + } return cp.converter.Convert(v) } diff --git a/pkg/picker/picker_concat.go b/pkg/picker/picker_concat.go new file mode 100644 index 00000000..fae23906 --- /dev/null +++ b/pkg/picker/picker_concat.go @@ -0,0 +1,86 @@ +package picker + +import ( + "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" +) + +var _ Picker = ConcatPicker{} + +type ( + ConcatItems struct { + pickers NullablePickers + } + + ConcatPicker struct { + items ConcatItems + } +) + +func (ci *ConcatItems) Add(items ...any) (err error) { + for i := range items { + switch v := items[i].(type) { + case uint8: + err = ci.AddIndex(int(v)) + case int8: + err = ci.AddIndex(int(v)) + case uint16: + err = ci.AddIndex(int(v)) + case int16: + err = ci.AddIndex(int(v)) + case uint32: + err = ci.AddIndex(int(v)) + case int32: + err = ci.AddIndex(int(v)) + case uint64: + err = ci.AddIndex(int(v)) + case int64: + err = ci.AddIndex(int(v)) + case int: + err = ci.AddIndex(v) + case uint: + err = ci.AddIndex(int(v)) + case string: + err = ci.AddConstant(v) + case []byte: + err = ci.AddConstant(string(v)) + default: + err = errors.ErrUnsupportedConcatItemType + } + if err != nil { + break + } + } + return err +} + +func (ci *ConcatItems) AddIndex(index int) error { + if index < 0 { + return errors.ErrInvalidIndex + } + ci.pickers = append(ci.pickers, IndexPicker(index)) + return nil +} + +func (ci *ConcatItems) AddConstant(constant string) error { + ci.pickers = append(ci.pickers, ConstantPicker(constant)) + return nil +} + +func (ci ConcatItems) Len() int { + return len(ci.pickers) +} + +func (cp ConcatPicker) Pick(record []string) (*Value, error) { + var sb strings.Builder + for _, p := range cp.items.pickers { + v, err := p.Pick(record) + if err != nil { + return nil, err + } + _, _ = sb.WriteString(v.Val) + v.Release() + } + return NewValue(sb.String()), nil +} diff --git a/pkg/picker/picker_concat_test.go b/pkg/picker/picker_concat_test.go new file mode 100644 index 00000000..631aa5f2 --- /dev/null +++ b/pkg/picker/picker_concat_test.go @@ -0,0 +1,84 @@ +package picker + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . 
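For readers skimming the new picker API: a minimal sketch of how ConcatItems and ConcatPicker fit together, assuming code that lives inside package picker (as the tests below do). The record values and the exampleConcatUsage name are illustrative, not part of the patch.

func exampleConcatUsage() (*Value, error) {
	var ci ConcatItems
	// Add normalizes any integer type to an IndexPicker and strings/[]byte
	// to a ConstantPicker, so YAML-decoded concat items can be passed as-is.
	if err := ci.Add(0, "-", 2); err != nil {
		return nil, err
	}
	cp := ConcatPicker{items: ci}
	// Picks record[0], the literal "-", then record[2]: "2020-01".
	return cp.Pick([]string{"2020", "ignored", "01"})
}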
"github.com/onsi/gomega" +) + +var _ = Describe("ConcatItems", func() { + DescribeTable("types", + func(item any, expectErr error) { + ci := ConcatItems{} + err := ci.Add(item) + if expectErr == nil { + Expect(err).NotTo(HaveOccurred()) + } else { + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + } + }, + EntryDescription("type %[1]T"), + Entry(nil, uint8(1), nil), + Entry(nil, int8(1), nil), + Entry(nil, int8(-1), errors.ErrInvalidIndex), + Entry(nil, uint16(1), nil), + Entry(nil, int16(1), nil), + Entry(nil, int16(-1), errors.ErrInvalidIndex), + Entry(nil, uint32(1), nil), + Entry(nil, int32(1), nil), + Entry(nil, int32(-1), errors.ErrInvalidIndex), + Entry(nil, uint64(1), nil), + Entry(nil, int64(1), nil), + Entry(nil, int64(-1), errors.ErrInvalidIndex), + Entry(nil, uint(1), nil), + Entry(nil, int(1), nil), + Entry(nil, int(-1), errors.ErrInvalidIndex), + Entry(nil, "str", nil), + Entry(nil, []byte("str"), nil), + Entry(nil, struct{}{}, errors.ErrUnsupportedConcatItemType), + ) + + It("nil", func() { + ci := ConcatItems{} + err := ci.Add() + Expect(err).NotTo(HaveOccurred()) + Expect(ci.Len()).To(Equal(0)) + }) + + It("many", func() { + ci := ConcatItems{} + err := ci.Add(1, "str1", 2, []byte("str2"), 3) + Expect(err).NotTo(HaveOccurred()) + Expect(ci.Len()).To(Equal(5)) + }) +}) + +var _ = Describe("ConcatPicker", func() { + DescribeTable(".Pick", + func(items []any, records []string, expectValue *Value, expectErr error) { + ci := ConcatItems{} + Expect(ci.Add(items...)).To(Not(HaveOccurred())) + + cp := ConcatPicker{ + items: ci, + } + value, err := cp.Pick(records) + if expectErr != nil { + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + Expect(value).To(BeNil()) + } else { + Expect(err).NotTo(HaveOccurred()) + Expect(value).To(Equal(expectValue)) + } + }, + Entry("int", []any{0}, []string{"10"}, &Value{Val: "10"}, nil), + Entry("string", []any{"str"}, []string{"10"}, &Value{Val: "str"}, nil), + Entry("mixed", []any{0, "str", 2}, []string{"10", "11", "12"}, &Value{Val: "10str12"}, nil), + Entry("pick failed", []any{0, "str", 2}, []string{"10", "11"}, nil, errors.ErrNoRecord), + ) +}) diff --git a/pkg/picker/picker_constant.go b/pkg/picker/picker_constant.go new file mode 100644 index 00000000..d26b9d3d --- /dev/null +++ b/pkg/picker/picker_constant.go @@ -0,0 +1,11 @@ +package picker + +var ( + _ Picker = ConstantPicker("") +) + +type ConstantPicker string + +func (cp ConstantPicker) Pick(_ []string) (*Value, error) { + return NewValue(string(cp)), nil +} diff --git a/pkg/picker/picker_constant_test.go b/pkg/picker/picker_constant_test.go new file mode 100644 index 00000000..06821f0f --- /dev/null +++ b/pkg/picker/picker_constant_test.go @@ -0,0 +1,18 @@ +package picker + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ConstantPicker", func() { + It("normal ConstantPicker", func() { + picker := ConstantPicker("test constant") + + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test constant", + })) + }) +}) diff --git a/pkg/picker/picker_index.go b/pkg/picker/picker_index.go new file mode 100644 index 00000000..70d62f6a --- /dev/null +++ b/pkg/picker/picker_index.go @@ -0,0 +1,19 @@ +package picker + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" +) + +var ( + _ Picker = IndexPicker(0) +) + +type IndexPicker int + +func (ip IndexPicker) Pick(record []string) (*Value, error) { + index := int(ip) + if index < 0 || index >= len(record) { + return nil, errors.ErrNoRecord + } + return NewValue(record[index]), nil +} diff --git a/pkg/picker/picker_index_test.go b/pkg/picker/picker_index_test.go new file mode 100644 index 00000000..6a764683 --- /dev/null +++ b/pkg/picker/picker_index_test.go @@ -0,0 +1,43 @@ +package picker + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("IndexPicker", func() { + It("normal IndexPicker", func() { + picker := IndexPicker(1) + + v, err := picker.Pick(nil) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(v).To(BeNil()) + + v, err = picker.Pick([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(v).To(BeNil()) + + v, err = picker.Pick([]string{"v0"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(v).To(BeNil()) + + v, err = picker.Pick([]string{"v0", "v1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v1", + })) + + v, err = picker.Pick([]string{"v0", "v1", "v2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v1", + })) + }) +}) diff --git a/pkg/picker/picker_suite_test.go b/pkg/picker/picker_suite_test.go new file mode 100644 index 00000000..7cd5a0a4 --- /dev/null +++ b/pkg/picker/picker_suite_test.go @@ -0,0 +1,13 @@ +package picker + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestPicker(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg picker Suite") +} diff --git a/pkg/picker/picker_test.go b/pkg/picker/picker_test.go new file mode 100644 index 00000000..0e6d754b --- /dev/null +++ b/pkg/picker/picker_test.go @@ -0,0 +1,197 @@ +package picker + +import ( + stderrors "errors" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ConverterPicker", func() { + It("normal ConverterPicker", func() { + picker := ConverterPicker{ + picker: PickerFunc(func(strings []string) (*Value, error) { + return &Value{Val: "v"}, nil + }), + converter: ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test " + v.Val + return v, nil + }), + } + + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "test v", + })) + }) + + It("pick failed ConverterPicker", func() { + picker := ConverterPicker{ + picker: PickerFunc(func(strings []string) (*Value, error) { + return nil, stderrors.New("test error") + }), + converter: ConverterFunc(func(v *Value) (*Value, error) { + v.Val = "test " + v.Val + return v, nil + }), + } + + v, err := picker.Pick([]string{}) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("test error")) + Expect(v).To(BeNil()) + }) + + It("converter failed ConverterPicker", func() { + picker := ConverterPicker{ + picker: PickerFunc(func(strings []string) (*Value, error) { + return &Value{Val: "v"}, nil + }), + converter: ConverterFunc(func(v *Value) (*Value, error) { + return nil, stderrors.New("test error") + }), + } + + v, err := picker.Pick([]string{}) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("test error")) + Expect(v).To(BeNil()) + }) + + It("converter nil ConverterPicker", func() { + picker := ConverterPicker{ + picker: PickerFunc(func(strings []string) (*Value, error) { + return &Value{Val: "v"}, nil + }), + } + + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + }) +}) + +var _ = Describe("NullablePickers", func() { + It("one NullablePickers", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: "v"}, nil + }), + } + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v", + })) + }) + + It("one NullablePickers failed", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return nil, stderrors.New("test failed") + }), + } + v, err := picker.Pick([]string{}) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("test failed")) + Expect(v).To(BeNil()) + }) + + It("many NullablePickers first", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: "v1"}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: "v2"}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: "v3"}, nil + }), + } + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v1", + })) + }) + + It("many NullablePickers middle", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: "v2"}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: "v3"}, nil + }), + } + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v2", + })) + }) + + It("many NullablePickers last", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{Val: 
"v3"}, nil + }), + } + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "v3", + })) + }) + + It("many NullablePickers no", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + } + v, err := picker.Pick([]string{}) + Expect(err).NotTo(HaveOccurred()) + Expect(v).To(Equal(&Value{ + Val: "", + IsNull: true, + })) + }) + + It("many NullablePickers failed", func() { + picker := NullablePickers{ + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return &Value{IsNull: true}, nil + }), + PickerFunc(func([]string) (*Value, error) { + return nil, stderrors.New("test error") + }), + } + v, err := picker.Pick([]string{}) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(Equal("test error")) + Expect(v).To(BeNil()) + }) +}) diff --git a/pkg/picker/value.go b/pkg/picker/value.go index ac47d85a..bd832752 100644 --- a/pkg/picker/value.go +++ b/pkg/picker/value.go @@ -1,7 +1,29 @@ package picker +import ( + "sync" +) + +var valuePool = sync.Pool{ + New: func() any { + return &Value{} + }, +} + type Value struct { Val string IsNull bool isSetNull bool } + +func NewValue(val string) *Value { + v := valuePool.Get().(*Value) + v.Val = val + v.IsNull = false + v.isSetNull = false + return v +} + +func (v *Value) Release() { + valuePool.Put(v) +} diff --git a/pkg/reader/batch.go b/pkg/reader/batch.go index 47390a7c..45559c00 100644 --- a/pkg/reader/batch.go +++ b/pkg/reader/batch.go @@ -1,80 +1,91 @@ +//go:generate mockgen -source=batch.go -destination batch_mock.go -package reader BatchRecordReader package reader import ( - "fmt" + stderrors "errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" ) -type Batch struct { - errCh chan<- base.ErrData - clientRequestCh chan base.ClientRequest - bufferSize int - currentIndex int - buffer []base.Data - batchMgr *BatchMgr -} +type ( + BatchRecordReader interface { + Source() source.Source + source.Sizer + ReadBatch() (int, spec.Records, error) + } -func NewBatch(mgr *BatchMgr, bufferSize int, clientReq chan base.ClientRequest, errCh chan<- base.ErrData) *Batch { - b := Batch{ - errCh: errCh, - clientRequestCh: clientReq, - bufferSize: bufferSize, - currentIndex: 0, - buffer: make([]base.Data, bufferSize), - batchMgr: mgr, + continueError struct { + Err error } - return &b -} -func (b *Batch) IsFull() bool { - return b.currentIndex == b.bufferSize -} + defaultBatchReader struct { + *options + rr RecordReader + } +) -func (b *Batch) Add(data base.Data) { - if b.IsFull() { - b.requestClient() +func NewBatchRecordReader(rr RecordReader, batch int, opts ...Option) BatchRecordReader { + brr := &defaultBatchReader{ + options: newOptions(append(opts, WithBatch(batch))...), + rr: rr, } - b.buffer[b.currentIndex] = data - b.currentIndex++ + brr.logger = brr.logger.With(logger.Field{Key: "source", Value: rr.Source().Name()}) + return brr } -func (b *Batch) Done() { - if b.currentIndex > 0 { - b.requestClient() +func NewContinueError(err error) error { + return &continueError{ + Err: err, } +} +func (r 
*defaultBatchReader) Source() source.Source { + return r.rr.Source() +} - b.clientRequestCh <- base.ClientRequest{ - ErrCh: b.errCh, - Stmt: base.STAT_FILEDONE, - } +func (r *defaultBatchReader) Size() (int64, error) { + return r.rr.Size() } -func (b *Batch) requestClient() { - var stmt string - var err error - if b.batchMgr.Schema.IsVertex() { - stmt, err = b.batchMgr.MakeVertexStmt(b.buffer[:b.currentIndex]) - } else { - stmt, err = b.batchMgr.MakeEdgeStmt(b.buffer[:b.currentIndex]) - } +func (r *defaultBatchReader) ReadBatch() (int, spec.Records, error) { + var ( + totalBytes int + records = make(spec.Records, 0, r.batch) + ) - if err != nil { - stmt = fmt.Sprintf("%s(%s)", "THERE_ARE_SOME_ERRORS", err.Error()) - } + for batch := 0; batch < r.batch; { + n, record, err := r.rr.Read() + totalBytes += n + if err != nil { + // case1: Read continue error. + if ce := new(continueError); stderrors.As(err, &ce) { + r.logger.WithError(ce.Err).Error("read source failed") + continue + } + + // case2: Read error and still have records. + if totalBytes > 0 { + break + } - b.clientRequestCh <- base.ClientRequest{ - Stmt: stmt, - ErrCh: b.errCh, - Data: b.buffer[:b.currentIndex], + // Read error and have no records. + return 0, nil, err + } + batch++ + records = append(records, record) } + return totalBytes, records, nil +} - b.currentIndex = 0 +func (ce *continueError) Error() string { + return ce.Err.Error() } -func (b *Batch) SendErrorData(d base.Data, err error) { - b.errCh <- base.ErrData{ - Error: err, - Data: []base.Data{d}, - } +func (ce *continueError) Cause() error { + return ce.Err +} + +func (ce *continueError) Unwrap() error { + return ce.Err } diff --git a/pkg/reader/batch_mock.go b/pkg/reader/batch_mock.go new file mode 100644 index 00000000..f5baefc0 --- /dev/null +++ b/pkg/reader/batch_mock.go @@ -0,0 +1,81 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: batch.go + +// Package reader is a generated GoMock package. +package reader + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" + source "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + spec "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" +) + +// MockBatchRecordReader is a mock of BatchRecordReader interface. +type MockBatchRecordReader struct { + ctrl *gomock.Controller + recorder *MockBatchRecordReaderMockRecorder +} + +// MockBatchRecordReaderMockRecorder is the mock recorder for MockBatchRecordReader. +type MockBatchRecordReaderMockRecorder struct { + mock *MockBatchRecordReader +} + +// NewMockBatchRecordReader creates a new mock instance. +func NewMockBatchRecordReader(ctrl *gomock.Controller) *MockBatchRecordReader { + mock := &MockBatchRecordReader{ctrl: ctrl} + mock.recorder = &MockBatchRecordReaderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockBatchRecordReader) EXPECT() *MockBatchRecordReaderMockRecorder { + return m.recorder +} + +// ReadBatch mocks base method. +func (m *MockBatchRecordReader) ReadBatch() (int, spec.Records, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ReadBatch") + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(spec.Records) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// ReadBatch indicates an expected call of ReadBatch. 
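Stepping back from the generated mock for a moment: the contract of the new ReadBatch, as I read it, is that errors wrapped by NewContinueError mark a single bad record to skip, while any other read error either flushes the records already batched or, when nothing has been read yet, is returned directly. A minimal sketch of the detection side, assuming package reader with stderrors aliasing the standard errors package:

func isRecoverable(err error) bool {
	// Mirrors the check in ReadBatch: unwrap down to a *continueError.
	ce := new(continueError)
	return stderrors.As(err, &ce)
}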
+func (mr *MockBatchRecordReaderMockRecorder) ReadBatch() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReadBatch", reflect.TypeOf((*MockBatchRecordReader)(nil).ReadBatch)) +} + +// Size mocks base method. +func (m *MockBatchRecordReader) Size() (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Size") + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Size indicates an expected call of Size. +func (mr *MockBatchRecordReaderMockRecorder) Size() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Size", reflect.TypeOf((*MockBatchRecordReader)(nil).Size)) +} + +// Source mocks base method. +func (m *MockBatchRecordReader) Source() source.Source { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Source") + ret0, _ := ret[0].(source.Source) + return ret0 +} + +// Source indicates an expected call of Source. +func (mr *MockBatchRecordReaderMockRecorder) Source() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Source", reflect.TypeOf((*MockBatchRecordReader)(nil).Source)) +} diff --git a/pkg/reader/batch_test.go b/pkg/reader/batch_test.go new file mode 100644 index 00000000..10be1b97 --- /dev/null +++ b/pkg/reader/batch_test.go @@ -0,0 +1,282 @@ +package reader + +import ( + stderrors "errors" + "io" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + pkgerrors "github.com/pkg/errors" +) + +var _ = Describe("BatchRecordReader", func() { + When("successfully", func() { + var ( + s source.Source + rr RecordReader + ) + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local.csv", + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + rr = NewRecordReader(s) + Expect(rr).NotTo(BeNil()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + It("default batch", func() { + var ( + nBytes int64 + n int + records []spec.Record + err error + ) + brr := NewBatchRecordReader(rr, 0) + Expect(brr.Source()).NotTo(BeNil()) + nBytes, err = brr.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(33)) + Expect(records).To(Equal([]spec.Record{ + {"1", "2", "3"}, + {"4", " 5", "6"}, + {" 7", "8", " 9"}, + {"10", " 11 ", " 12"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(records).To(BeEmpty()) + }) + + It("1 batch", func() { + var ( + nBytes int64 + n int + records []spec.Record + err error + ) + brr := NewBatchRecordReader(rr, 1) + nBytes, err = brr.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(6)) + Expect(records).To(Equal([]spec.Record{ + {"1", "2", "3"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(7)) + Expect(records).To(Equal([]spec.Record{ + {"4", " 5", "6"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(8)) + Expect(records).To(Equal([]spec.Record{ + 
{" 7", "8", " 9"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(12)) + Expect(records).To(Equal([]spec.Record{ + {"10", " 11 ", " 12"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(records).To(BeEmpty()) + }) + + It("2 batch", func() { + var ( + nBytes int64 + n int + records []spec.Record + err error + ) + brr := NewBatchRecordReader(rr, 2) + nBytes, err = brr.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(13)) + Expect(records).To(Equal([]spec.Record{ + {"1", "2", "3"}, + {"4", " 5", "6"}, + })) + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(20)) + Expect(records).To(Equal([]spec.Record{ + {" 7", "8", " 9"}, + {"10", " 11 ", " 12"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(records).To(BeEmpty()) + }) + + It("3 batch", func() { + var ( + nBytes int64 + n int + records []spec.Record + err error + ) + brr := NewBatchRecordReader(rr, 3) + nBytes, err = brr.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(21)) + Expect(records).To(Equal([]spec.Record{ + {"1", "2", "3"}, + {"4", " 5", "6"}, + {" 7", "8", " 9"}, + })) + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(12)) + Expect(records).To(Equal([]spec.Record{ + {"10", " 11 ", " 12"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(records).To(BeEmpty()) + }) + + It("4 batch", func() { + var ( + nBytes int64 + n int + records []spec.Record + err error + ) + brr := NewBatchRecordReader(rr, 4) + nBytes, err = brr.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(33)) + Expect(records).To(Equal([]spec.Record{ + {"1", "2", "3"}, + {"4", " 5", "6"}, + {" 7", "8", " 9"}, + {"10", " 11 ", " 12"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(records).To(BeEmpty()) + }) + }) + + When("failed", func() { + var ( + s source.Source + rr RecordReader + ) + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local_failed.csv", + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + rr = NewRecordReader(s) + Expect(rr).NotTo(BeNil()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("", func() { + var ( + nBytes int64 + n int + records []spec.Record + err error + ) + brr := NewBatchRecordReader(rr, 2) + nBytes, err = brr.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(16))) + + n, records, err = brr.ReadBatch() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(13)) + Expect(records).To(Equal([]spec.Record{ + {"id1"}, + {"id3"}, + })) + + n, records, err = brr.ReadBatch() + 
Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(3)) + Expect(records).To(Equal([]spec.Record{ + {"id4"}, + })) + + n, records, err = brr.ReadBatch() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(records).To(BeEmpty()) + }) + }) +}) + +var _ = Describe("continueError", func() { + It("", func() { + var baseErr = stderrors.New("test error") + err := NewContinueError(baseErr) + Expect(err.Error()).To(Equal(baseErr.Error())) + Expect(stderrors.Unwrap(err)).To(Equal(baseErr)) + Expect(pkgerrors.Cause(err)).To(Equal(baseErr)) + }) +}) diff --git a/pkg/reader/batchmgr.go b/pkg/reader/batchmgr.go deleted file mode 100644 index 71be14b0..00000000 --- a/pkg/reader/batchmgr.go +++ /dev/null @@ -1,344 +0,0 @@ -package reader - -import ( - "errors" - "fmt" - "hash/fnv" - "strings" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type BatchMgr struct { - Schema *config.Schema - Batches []*Batch - InsertStmtPrefix string - initializedSchema bool - emptyPropsTagNames []string - runnerLogger *logger.RunnerLogger -} - -func NewBatchMgr(schema *config.Schema, batchSize int, clientRequestChs []chan base.ClientRequest, errCh chan<- base.ErrData) *BatchMgr { - bm := BatchMgr{ - Schema: schema, - Batches: make([]*Batch, len(clientRequestChs)), - initializedSchema: false, - emptyPropsTagNames: schema.CollectEmptyPropsTagNames(), - } - - for i := range bm.Batches { - bm.Batches[i] = NewBatch(&bm, batchSize, clientRequestChs[i], errCh) - } - bm.generateInsertStmtPrefix() - return &bm -} - -func (bm *BatchMgr) Done() { - for i := range bm.Batches { - bm.Batches[i].Done() - } -} - -func (bm *BatchMgr) InitSchema(header base.Record, runnerLogger *logger.RunnerLogger) (err error) { - if bm.initializedSchema { - logger.Log.Info("Batch manager schema has been initialized!") - return - } - bm.initializedSchema = true - for i, hh := range header { - for _, h := range strings.Split(hh, "/") { - switch c := strings.ToUpper(h); { - case c == base.LABEL_LABEL: - err = fmt.Errorf("Invalid schema: %v", header) - case strings.HasPrefix(c, base.LABEL_VID): - *bm.Schema.Vertex.VID.Index = i - err = bm.Schema.Vertex.VID.ParseFunction(c) - _ = bm.Schema.Vertex.VID.InitPicker() - case strings.HasPrefix(c, base.LABEL_SRC_VID): - *bm.Schema.Edge.SrcVID.Index = i - err = bm.Schema.Edge.SrcVID.ParseFunction(c) - _ = bm.Schema.Edge.SrcVID.InitPicker() - case strings.HasPrefix(c, base.LABEL_DST_VID): - *bm.Schema.Edge.DstVID.Index = i - err = bm.Schema.Edge.DstVID.ParseFunction(c) - _ = bm.Schema.Edge.DstVID.InitPicker() - case c == base.LABEL_RANK: - if bm.Schema.Edge.Rank == nil { - rank := i - bm.Schema.Edge.Rank = &config.Rank{Index: &rank} - } else { - *bm.Schema.Edge.Rank.Index = i - } - case c == base.LABEL_IGNORE: - default: - if bm.Schema.IsVertex() { - bm.addVertexTags(h, i) - } else { - bm.addEdgeProps(h, i) - } - } - } - } - - for _, tagName := range bm.emptyPropsTagNames { - bm.getOrCreateVertexTagByName(tagName) - } - - bm.generateInsertStmtPrefix() - return -} - -func (bm *BatchMgr) addVertexTags(r string, i int) { - columnName, columnType := bm.parseProperty(r) - tagName, prop := bm.parseTag(columnName) - if tagName == "" { - return - } - tag := bm.getOrCreateVertexTagByName(tagName) - p := config.Prop{ - Name: &prop, - Type: &columnType, - Index: &i, - } - _ = p.InitPicker() - tag.Props = append(tag.Props, &p) -} - -func (bm 
*BatchMgr) addEdgeProps(r string, i int) { - columnName, columnType := bm.parseProperty(r) - res := strings.SplitN(columnName, ".", 2) - prop := res[0] - if len(res) > 1 { - prop = res[1] - } - p := config.Prop{ - Name: &prop, - Type: &columnType, - Index: &i, - } - _ = p.InitPicker() - bm.Schema.Edge.Props = append(bm.Schema.Edge.Props, &p) -} - -func (bm *BatchMgr) generateInsertStmtPrefix() { - var builder strings.Builder - if bm.Schema.IsVertex() { - builder.WriteString("INSERT VERTEX ") - for i, tag := range bm.Schema.Vertex.Tags { - builder.WriteString(fmt.Sprintf("`%s`(%s)", *tag.Name, bm.GeneratePropsString(tag.Props))) - if i < len(bm.Schema.Vertex.Tags)-1 { - builder.WriteString(",") - } - } - builder.WriteString(" VALUES ") - } else { - edge := bm.Schema.Edge - builder.WriteString(fmt.Sprintf("INSERT EDGE `%s`(%s) VALUES ", *edge.Name, bm.GeneratePropsString(edge.Props))) - } - bm.InsertStmtPrefix = builder.String() -} - -func (bm *BatchMgr) GeneratePropsString(props []*config.Prop) string { - var builder strings.Builder - for i, prop := range props { - builder.WriteString("`") - builder.WriteString(*prop.Name) - builder.WriteString("`") - if i < len(props)-1 { - builder.WriteString(",") - } - } - return builder.String() -} - -func (bm *BatchMgr) getOrCreateVertexTagByName(name string) *config.Tag { - for i := range bm.Schema.Vertex.Tags { - if strings.EqualFold(*bm.Schema.Vertex.Tags[i].Name, name) { - return bm.Schema.Vertex.Tags[i] - } - } - newTag := &config.Tag{ - Name: &name, - } - bm.Schema.Vertex.Tags = append(bm.Schema.Vertex.Tags, newTag) - return newTag -} - -func (bm *BatchMgr) parseTag(s string) (tag, field string) { - res := strings.SplitN(s, ".", 2) - - if len(res) < 2 { - return "", "" - } - - return res[0], res[1] -} - -func (bm *BatchMgr) parseProperty(r string) (columnName, columnType string) { - res := strings.SplitN(r, ":", 2) - - if len(res) == 1 || res[1] == "" || !base.IsValidType(res[1]) { - return res[0], "string" - } else { - return res[0], res[1] - } -} - -func (bm *BatchMgr) Add(data base.Data, runnerLogger *logger.RunnerLogger) error { - var vid string - if bm.Schema.IsVertex() { - vid = data.Record[*bm.Schema.Vertex.VID.Index] - } else { - vid = data.Record[*bm.Schema.Edge.SrcVID.Index] - } - batchIdx := getBatchId(vid, len(bm.Batches), runnerLogger) - bm.Batches[batchIdx].Add(data) - return nil -} - -var h = fnv.New32a() - -func getBatchId(idStr string, numChans int, runnerLogger *logger.RunnerLogger) uint32 { - _, err := h.Write([]byte(idStr)) - if err != nil { - logger.Log.Error(err) - } - return h.Sum32() % uint32(numChans) -} - -func makeStmt(batch []base.Data, f func([]base.Data) (string, error)) (string, error) { - if len(batch) == 0 { - return "", errors.New("Make stmt for empty batch") - } - - if len(batch) == 1 { - return f(batch) - } - - var builder strings.Builder - lastIdx, length := 0, len(batch) - for i := 1; i < length; i++ { - if batch[i-1].Type != batch[i].Type { - str, err := f(batch[lastIdx:i]) - if err != nil { - return "", err - } - builder.WriteString(str) - lastIdx = i - } - } - str, err := f(batch[lastIdx:]) - if err != nil { - return "", err - } - builder.WriteString(str) - return builder.String(), nil -} - -func (m *BatchMgr) MakeVertexStmt(batch []base.Data) (string, error) { - return makeStmt(batch, m.makeVertexBatchStmt) -} - -func (m *BatchMgr) makeVertexBatchStmt(batch []base.Data) (string, error) { - length := len(batch) - switch batch[length-1].Type { - case base.INSERT: - return m.makeVertexInsertStmt(batch) - 
case base.DELETE: - return m.makeVertexDeleteStmt(batch) - default: - return "", fmt.Errorf("Invalid data type: %s", batch[length-1].Type) - } -} - -func (m *BatchMgr) makeVertexInsertStmt(data []base.Data) (string, error) { - var builder strings.Builder - builder.WriteString(m.InsertStmtPrefix) - batchSize := len(data) - for i := 0; i < batchSize; i++ { - str, err := m.Schema.Vertex.FormatValues(data[i].Record) - if err != nil { - return "", err - } - builder.WriteString(str) - if i < batchSize-1 { - builder.WriteString(",") - } else { - builder.WriteString(";") - } - } - - return builder.String(), nil -} - -func (m *BatchMgr) makeVertexDeleteStmt(data []base.Data) (string, error) { - var idList []string - for _, d := range data { - vid, err := m.Schema.Vertex.VID.FormatValue(d.Record) - if err != nil { - return "", err - } - idList = append(idList, vid) - } - return fmt.Sprintf("DELETE VERTEX %s;", strings.Join(idList, ",")), nil -} - -func (m *BatchMgr) MakeEdgeStmt(batch []base.Data) (string, error) { - return makeStmt(batch, m.makeEdgeBatchStmt) -} - -func (m *BatchMgr) makeEdgeBatchStmt(batch []base.Data) (string, error) { - length := len(batch) - switch batch[length-1].Type { - case base.INSERT: - return m.makeEdgeInsertStmt(batch) - case base.DELETE: - return m.makeEdgeDeleteStmt(batch) - default: - return "", fmt.Errorf("Invalid data type: %s", batch[length-1].Type) - } -} - -func (m *BatchMgr) makeEdgeInsertStmt(batch []base.Data) (string, error) { - var builder strings.Builder - builder.WriteString(m.InsertStmtPrefix) - batchSize := len(batch) - for i := 0; i < batchSize; i++ { - str, err := m.Schema.Edge.FormatValues(batch[i].Record) - if err != nil { - return "", err - } - builder.WriteString(str) - if i < batchSize-1 { - builder.WriteString(",") - } else { - builder.WriteString(";") - } - } - return builder.String(), nil -} - -func (m *BatchMgr) makeEdgeDeleteStmt(batch []base.Data) (string, error) { - var idList []string - for _, d := range batch { - var id string - srcVid, err := m.Schema.Edge.SrcVID.FormatValue(d.Record) - if err != nil { - return "", err - } - dstVid, err := m.Schema.Edge.DstVID.FormatValue(d.Record) - if err != nil { - return "", err - } - if m.Schema.Edge.Rank != nil { - rank := d.Record[*m.Schema.Edge.Rank.Index] - id = fmt.Sprintf("%s->%s@%s", srcVid, dstVid, rank) - } else { - id = fmt.Sprintf("%s->%s", srcVid, dstVid) - } - idList = append(idList, id) - } - return fmt.Sprintf("DELETE EDGE %s %s;", *m.Schema.Edge.Name, strings.Join(idList, ",")), nil -} diff --git a/pkg/reader/csv.go b/pkg/reader/csv.go new file mode 100644 index 00000000..f96316d7 --- /dev/null +++ b/pkg/reader/csv.go @@ -0,0 +1,99 @@ +package reader + +import ( + "bufio" + "encoding/csv" + stderrors "errors" + "io" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" +) + +type ( + header struct { + withHeader bool + hasRead bool + } + + csvReader struct { + *baseReader + rr *remainingReader + br *bufio.Reader + cr *csv.Reader + h header + } + + remainingReader struct { + io.Reader + remaining int + } +) + +func NewCSVReader(s source.Source) RecordReader { + rr := &remainingReader{Reader: s} + br := bufio.NewReader(rr) + cr := csv.NewReader(br) + h := header{} + + if c := s.Config(); c != nil && c.CSV != nil { + if chars := []rune(c.CSV.Delimiter); len(chars) > 0 { + cr.Comma = chars[0] + } + cr.LazyQuotes = c.CSV.LazyQuotes + + h.withHeader = c.CSV.WithHeader + } + + return &csvReader{ + baseReader: &baseReader{ + s: s, + 
}, + rr: rr, + br: br, + cr: cr, + h: h, + } +} + +func (r *csvReader) Size() (int64, error) { + return r.s.Size() +} + +func (r *csvReader) Read() (int, spec.Record, error) { + // determine whether the reader has read the csv header + if r.h.withHeader && !r.h.hasRead { + r.h.hasRead = true + + // if read header, read and move to next line + record, err := r.cr.Read() + if err != nil { + return 0, record, r.wrapErr(err) + } + } + + record, err := r.cr.Read() + return r.rr.Take(r.br.Buffered()), record, r.wrapErr(err) +} + +func (*csvReader) wrapErr(err error) error { + if err == nil { + return nil + } + + if pe := new(csv.ParseError); stderrors.As(err, &pe) { + err = NewContinueError(err) + } + return err +} + +func (r *remainingReader) Read(p []byte) (n int, err error) { + n, err = r.Reader.Read(p) + r.remaining += n + return n, err +} + +func (r *remainingReader) Take(buffered int) (n int) { + n, r.remaining = r.remaining-buffered, buffered + return n +} diff --git a/pkg/reader/csv_test.go b/pkg/reader/csv_test.go new file mode 100644 index 00000000..2db4223e --- /dev/null +++ b/pkg/reader/csv_test.go @@ -0,0 +1,219 @@ +package reader + +import ( + stderrors "errors" + "io" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("csvReader", func() { + Describe("default", func() { + var s source.Source + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local.csv", + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("should success", func() { + var ( + nBytes int64 + n int + record spec.Record + err error + ) + r := NewCSVReader(s) + nBytes, err = r.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(6)) + Expect(record).To(Equal(spec.Record{"1", "2", "3"})) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(7)) + Expect(record).To(Equal(spec.Record{"4", " 5", "6"})) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(8)) + Expect(record).To(Equal(spec.Record{" 7", "8", " 9"})) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(12)) + Expect(record).To(Equal(spec.Record{"10", " 11 ", " 12"})) + + n, record, err = r.Read() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(record).To(BeEmpty()) + }) + }) + + Describe("with delimiter", func() { + var s source.Source + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local_delimiter.csv", + }, + CSV: &source.CSVConfig{ + Delimiter: "|", + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("should success", func() { + var ( + nBytes int64 + n int + record spec.Record + err error + ) + r := NewCSVReader(s) + nBytes, err = r.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(33))) + + n, record, err = r.Read() + 
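The part of csvReader worth a second look is the byte attribution: every read from the source passes through remainingReader, which counts consumed bytes, and after each parsed record Take subtracts whatever is still sitting unconsumed in the bufio buffer. A sketch of that arithmetic (names mine, logic mirrors Take above):

func attributeBytes(consumedFromSource, stillBuffered int) (forThisRecord, carriedOver int) {
	// Bytes pulled from the source, minus bytes encoding/csv has not
	// consumed yet, equals the bytes belonging to the record just returned.
	return consumedFromSource - stillBuffered, stillBuffered
}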
Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(6)) + Expect(record).To(Equal(spec.Record{"1", "2", "3"})) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(7)) + Expect(record).To(Equal(spec.Record{"4", " 5", "6"})) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(8)) + Expect(record).To(Equal(spec.Record{" 7", "8", " 9"})) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(12)) + Expect(record).To(Equal(spec.Record{"10", " 11 ", " 12"})) + + n, record, err = r.Read() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(record).To(BeEmpty()) + }) + }) + + Describe("withHeader", func() { + var s source.Source + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local_withHeader.csv", + }, + CSV: &source.CSVConfig{ + WithHeader: true, + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("should success", func() { + var ( + nBytes int64 + n int + record spec.Record + err error + ) + r := NewCSVReader(s) + nBytes, err = r.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(14))) + + n, record, err = r.Read() + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(14)) + Expect(record).To(Equal(spec.Record{"1", "2", "3"})) + + n, record, err = r.Read() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, io.EOF)).To(BeTrue()) + Expect(n).To(Equal(0)) + Expect(record).To(BeEmpty()) + }) + }) + + Describe("withHeader read failed", func() { + var s source.Source + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local_withHeader_failed.csv", + }, + CSV: &source.CSVConfig{ + WithHeader: true, + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("should success", func() { + var ( + nBytes int64 + err error + ) + r := NewCSVReader(s) + nBytes, err = r.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(9))) + + _, _, err = r.Read() + Expect(err).To(HaveOccurred()) + }) + }) +}) diff --git a/pkg/reader/option.go b/pkg/reader/option.go new file mode 100644 index 00000000..c948362a --- /dev/null +++ b/pkg/reader/option.go @@ -0,0 +1,54 @@ +package reader + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" +) + +const ( + DefaultBatchSize = 128 +) + +type ( + Option func(*options) + + options struct { + batch int + logger logger.Logger + } +) + +func WithBatch(batch int) Option { + return func(m *options) { + m.batch = batch + } +} + +func WithLogger(l logger.Logger) Option { + return func(m *options) { + m.logger = l + } +} + +func newOptions(opts ...Option) *options { + var defaultOptions = &options{ + batch: DefaultBatchSize, + } + + defaultOptions.withOptions(opts...) 
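To round off the options: a sketch of how a caller combines them, assuming package reader and the logger package from this patch. A batch of zero (or any non-positive value) falls back to DefaultBatchSize, i.e. 128.

func newDefaultBatchReader(rr RecordReader) BatchRecordReader {
	// WithBatch(0) is appended internally by NewBatchRecordReader; the
	// <= 0 guard in newOptions then restores DefaultBatchSize.
	return NewBatchRecordReader(rr, 0, WithLogger(logger.NopLogger))
}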
+ + if defaultOptions.batch <= 0 { + defaultOptions.batch = DefaultBatchSize + } + + return defaultOptions +} + +func (o *options) withOptions(opts ...Option) { + for _, opt := range opts { + opt(o) + } + + if o.logger == nil { + o.logger = logger.NopLogger + } +} diff --git a/pkg/reader/option_test.go b/pkg/reader/option_test.go new file mode 100644 index 00000000..aab31a22 --- /dev/null +++ b/pkg/reader/option_test.go @@ -0,0 +1,27 @@ +package reader + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Option", func() { + It("newOptions", func() { + o := newOptions() + Expect(o).NotTo(BeNil()) + + Expect(o.logger).NotTo(BeNil()) + }) + + It("withXXX", func() { + o := newOptions( + WithBatch(100), + WithLogger(logger.NopLogger), + ) + Expect(o).NotTo(BeNil()) + Expect(o.batch).To(Equal(100)) + Expect(o.logger).NotTo(BeNil()) + }) +}) diff --git a/pkg/reader/reader.go b/pkg/reader/reader.go index 4e570545..db645ed5 100644 --- a/pkg/reader/reader.go +++ b/pkg/reader/reader.go @@ -1,182 +1,13 @@ package reader -import ( - "fmt" - "io" - "io/ioutil" - "net/http" - "net/url" - "os" - "strings" - "time" +import "github.com/vesoft-inc/nebula-importer/v4/pkg/source" - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/csv" - "github.com/vesoft-inc/nebula-importer/v3/pkg/errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type DataFileReader interface { - InitReader(*os.File, *logger.RunnerLogger) - ReadLine() (base.Data, error) - TotalBytes() (int64, error) -} - -// FIXME: private fields -type FileReader struct { - FileIdx int - File *config.File - localFile bool - cleanup bool - WithHeader bool - DataReader DataFileReader - Concurrency int - BatchMgr *BatchMgr - StopFlag bool - runnerLogger *logger.RunnerLogger -} - -func New(fileIdx int, file *config.File, cleanup bool, clientRequestChs []chan base.ClientRequest, - errCh chan<- base.ErrData, runnerLogger *logger.RunnerLogger) (*FileReader, error) { - switch strings.ToLower(*file.Type) { - case "csv": - r := csv.CSVReader{CSVConfig: file.CSV} - reader := FileReader{ - FileIdx: fileIdx, - DataReader: &r, - File: file, - WithHeader: *file.CSV.WithHeader, - StopFlag: false, - cleanup: cleanup, - runnerLogger: runnerLogger, - } - reader.BatchMgr = NewBatchMgr(file.Schema, *file.BatchSize, clientRequestChs, errCh) - return &reader, nil - default: - return nil, fmt.Errorf("Wrong file type: %s", *file.Type) - } -} - -func (r *FileReader) startLog() { - fpath, _ := base.FormatFilePath(*r.File.Path) - logger.Log.Infof("Start to read file(%d): %s, schema: < %s >", r.FileIdx, fpath, r.BatchMgr.Schema.String()) -} - -func (r *FileReader) Stop() { - r.StopFlag = true -} - -func (r *FileReader) prepareDataFile() (*string, error) { - local, filename, err := base.ExtractFilename(*r.File.Path) - r.localFile = local - if r.localFile { - // Do nothing for local file, so it wouldn't throw any errors - return &filename, nil - } - if err != nil { - return nil, errors.Wrap(errors.DownloadError, err) - } - - if _, err := url.ParseRequestURI(*r.File.Path); err != nil { - return nil, errors.Wrap(errors.DownloadError, err) - } - - // Download data file from internet to `/tmp` directory and return the path - file, err := ioutil.TempFile("", fmt.Sprintf("*_%s", filename)) - if err != nil { - return nil, errors.Wrap(errors.UnknownError, err) - } - defer 
file.Close() - - client := http.Client{Timeout: 5 * time.Second} - - resp, err := client.Get(*r.File.Path) - if err != nil { - return nil, errors.Wrap(errors.DownloadError, err) +type ( + baseReader struct { + s source.Source } - defer resp.Body.Close() - - n, err := io.Copy(file, resp.Body) - if err != nil { - return nil, errors.Wrap(errors.DownloadError, err) - } - - filepath := file.Name() - - fpath, _ := base.FormatFilePath(*r.File.Path) - logger.Log.Infof("File(%s) has been downloaded to \"%s\", size: %d", fpath, filepath, n) - - return &filepath, nil -} - -func (r *FileReader) Read() (numErrorLines int64, err error) { - filePath, err := r.prepareDataFile() - if err != nil { - return numErrorLines, err - } - file, err := os.Open(*filePath) - if err != nil { - return numErrorLines, errors.Wrap(errors.ConfigError, err) - } - defer func() { - if err := file.Close(); err != nil { - logger.Log.Errorf("Fail to close opened data file: %s", *filePath) - return - } - if !r.localFile && r.cleanup { - if err := os.Remove(*filePath); err != nil { - logger.Log.Errorf("Fail to remove temp data file: %s", *filePath) - } else { - logger.Log.Infof("Temp downloaded data file has been removed: %s", *filePath) - } - } - }() - - r.DataReader.InitReader(file, r.runnerLogger) - - lineNum := 0 - - if !r.WithHeader { - r.startLog() - } - - for { - data, err := r.DataReader.ReadLine() - if err == io.EOF { - break - } - - lineNum++ - - if err == nil { - if data.Type == base.HEADER { - err = r.BatchMgr.InitSchema(data.Record, r.runnerLogger) - r.startLog() - } else { - if r.File.IsInOrder() { - err = r.BatchMgr.Add(data, r.runnerLogger) - } else { - idx := lineNum % len(r.BatchMgr.Batches) - r.BatchMgr.Batches[idx].Add(data) - } - } - } - - if err != nil { - fpath, _ := base.FormatFilePath(*r.File.Path) - logger.Log.Errorf("Fail to read file(%s) line %d, error: %s", fpath, lineNum, err.Error()) - numErrorLines++ - } - - if r.StopFlag || (r.File.Limit != nil && *r.File.Limit > 0 && *r.File.Limit <= lineNum) { - break - } - } - - r.BatchMgr.Done() - fpath, _ := base.FormatFilePath(*r.File.Path) - logger.Log.Infof("Total lines of file(%s) is: %d, error lines: %d", fpath, lineNum, numErrorLines) +) - return numErrorLines, nil +func (r *baseReader) Source() source.Source { + return r.s } diff --git a/pkg/reader/reader_suite_test.go b/pkg/reader/reader_suite_test.go new file mode 100644 index 00000000..33bbeb8f --- /dev/null +++ b/pkg/reader/reader_suite_test.go @@ -0,0 +1,13 @@ +package reader + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestReader(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg reader Suite") +} diff --git a/pkg/reader/record.go b/pkg/reader/record.go new file mode 100644 index 00000000..aeebdc80 --- /dev/null +++ b/pkg/reader/record.go @@ -0,0 +1,20 @@ +//go:generate mockgen -source=record.go -destination record_mock.go -package reader RecordReader +package reader + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" +) + +type ( + RecordReader interface { + Source() source.Source + source.Sizer + Read() (int, spec.Record, error) + } +) + +func NewRecordReader(s source.Source) RecordReader { + // TODO: support other source formats + return NewCSVReader(s) +} diff --git a/pkg/reader/record_mock.go b/pkg/reader/record_mock.go new file mode 100644 index 00000000..e22f676b --- /dev/null +++ b/pkg/reader/record_mock.go @@ -0,0 +1,81 @@ +// Code generated by MockGen. 
DO NOT EDIT. +// Source: record.go + +// Package reader is a generated GoMock package. +package reader + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" + source "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + spec "github.com/vesoft-inc/nebula-importer/v4/pkg/spec" +) + +// MockRecordReader is a mock of RecordReader interface. +type MockRecordReader struct { + ctrl *gomock.Controller + recorder *MockRecordReaderMockRecorder +} + +// MockRecordReaderMockRecorder is the mock recorder for MockRecordReader. +type MockRecordReaderMockRecorder struct { + mock *MockRecordReader +} + +// NewMockRecordReader creates a new mock instance. +func NewMockRecordReader(ctrl *gomock.Controller) *MockRecordReader { + mock := &MockRecordReader{ctrl: ctrl} + mock.recorder = &MockRecordReaderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockRecordReader) EXPECT() *MockRecordReaderMockRecorder { + return m.recorder +} + +// Read mocks base method. +func (m *MockRecordReader) Read() (int, spec.Record, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Read") + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(spec.Record) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// Read indicates an expected call of Read. +func (mr *MockRecordReaderMockRecorder) Read() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Read", reflect.TypeOf((*MockRecordReader)(nil).Read)) +} + +// Size mocks base method. +func (m *MockRecordReader) Size() (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Size") + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Size indicates an expected call of Size. +func (mr *MockRecordReaderMockRecorder) Size() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Size", reflect.TypeOf((*MockRecordReader)(nil).Size)) +} + +// Source mocks base method. +func (m *MockRecordReader) Source() source.Source { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Source") + ret0, _ := ret[0].(source.Source) + return ret0 +} + +// Source indicates an expected call of Source. +func (mr *MockRecordReaderMockRecorder) Source() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Source", reflect.TypeOf((*MockRecordReader)(nil).Source)) +} diff --git a/pkg/reader/record_test.go b/pkg/reader/record_test.go new file mode 100644 index 00000000..1b20931e --- /dev/null +++ b/pkg/reader/record_test.go @@ -0,0 +1,33 @@ +package reader + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/source" + + . "github.com/onsi/ginkgo/v2" + . 
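A quick note on using the generated mock (hypothetical test code, assuming package reader plus the io, spec, and gomock imports): expectations on the same method are matched in the order they were declared, which makes it easy to script a reader that yields one record and then EOF.

func stubRecordReader(ctrl *gomock.Controller) RecordReader {
	m := NewMockRecordReader(ctrl)
	m.EXPECT().Read().Return(5, spec.Record{"a"}, nil)
	m.EXPECT().Read().Return(0, nil, io.EOF)
	return m
}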
"github.com/onsi/gomega" +) + +var _ = Describe("RecordReader", func() { + var s source.Source + BeforeEach(func() { + var err error + s, err = source.New(&source.Config{ + Local: &source.LocalConfig{ + Path: "testdata/local.csv", + }, + }) + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + }) + AfterEach(func() { + err := s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + It("should success", func() { + r := NewRecordReader(s) + Expect(r).NotTo(BeNil()) + }) +}) diff --git a/pkg/reader/testdata/local.csv b/pkg/reader/testdata/local.csv new file mode 100644 index 00000000..0fbc6b59 --- /dev/null +++ b/pkg/reader/testdata/local.csv @@ -0,0 +1,4 @@ +1,2,3 +4, 5,6 + 7,8, 9 +10, 11 , 12 diff --git a/pkg/reader/testdata/local_delimiter.csv b/pkg/reader/testdata/local_delimiter.csv new file mode 100644 index 00000000..bc89fc15 --- /dev/null +++ b/pkg/reader/testdata/local_delimiter.csv @@ -0,0 +1,4 @@ +1|2|3 +4| 5|6 + 7|8| 9 +10| 11 | 12 diff --git a/pkg/reader/testdata/local_failed.csv b/pkg/reader/testdata/local_failed.csv new file mode 100644 index 00000000..b6c9a20c --- /dev/null +++ b/pkg/reader/testdata/local_failed.csv @@ -0,0 +1,4 @@ +id1 +id"2 +id3 +id4 \ No newline at end of file diff --git a/pkg/reader/testdata/local_withHeader.csv b/pkg/reader/testdata/local_withHeader.csv new file mode 100644 index 00000000..10e6a5f4 --- /dev/null +++ b/pkg/reader/testdata/local_withHeader.csv @@ -0,0 +1,2 @@ +h1,h2,h3 +1,2,3 \ No newline at end of file diff --git a/pkg/reader/testdata/local_withHeader_failed.csv b/pkg/reader/testdata/local_withHeader_failed.csv new file mode 100644 index 00000000..e34a6657 --- /dev/null +++ b/pkg/reader/testdata/local_withHeader_failed.csv @@ -0,0 +1 @@ +h1,h2,h"3 \ No newline at end of file diff --git a/pkg/source/config.go b/pkg/source/config.go new file mode 100644 index 00000000..0bc3a68c --- /dev/null +++ b/pkg/source/config.go @@ -0,0 +1,45 @@ +package source + +type ( + Config struct { + Local *LocalConfig `yaml:",inline"` + S3 *S3Config `yaml:"s3,omitempty"` + OSS *OSSConfig `yaml:"oss,omitempty"` + FTP *FTPConfig `yaml:"ftp,omitempty"` + SFTP *SFTPConfig `yaml:"sftp,omitempty"` + HDFS *HDFSConfig `yaml:"hdfs,omitempty"` + // The following is format information + CSV *CSVConfig `yaml:"csv,omitempty"` + } + + CSVConfig struct { + Delimiter string `yaml:"delimiter,omitempty"` + WithHeader bool `yaml:"withHeader,omitempty"` + LazyQuotes bool `yaml:"lazyQuotes,omitempty"` + } +) + +func (c *Config) Clone() *Config { + cpy := *c + switch { + case cpy.S3 != nil: + cpy1 := *cpy.S3 + cpy.S3 = &cpy1 + case cpy.OSS != nil: + cpy1 := *cpy.OSS + cpy.OSS = &cpy1 + case cpy.FTP != nil: + cpy1 := *cpy.FTP + cpy.FTP = &cpy1 + case cpy.SFTP != nil: + cpy1 := *cpy.SFTP + cpy.SFTP = &cpy1 + case cpy.HDFS != nil: + cpy1 := *cpy.HDFS + cpy.HDFS = &cpy1 + default: + cpy1 := *cpy.Local + cpy.Local = &cpy1 + } + return &cpy +} diff --git a/pkg/source/config_test.go b/pkg/source/config_test.go new file mode 100644 index 00000000..762ba11a --- /dev/null +++ b/pkg/source/config_test.go @@ -0,0 +1,82 @@ +package source + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Config", func() { + Describe(".Clone", func() { + It("S3", func() { + c := Config{ + S3: &S3Config{ + Key: "key", + }, + } + c1 := c.Clone() + Expect(c1.S3.Key).To(Equal("key")) + c.S3.Key = "x" + Expect(c1.S3.Key).To(Equal("key")) + }) + + It("OSS", func() { + c := Config{ + OSS: &OSSConfig{ + Key: "key", + }, + } + c1 := c.Clone() + Expect(c1.OSS.Key).To(Equal("key")) + c.OSS.Key = "x" + Expect(c1.OSS.Key).To(Equal("key")) + }) + + It("FTP", func() { + c := Config{ + FTP: &FTPConfig{ + Path: "path", + }, + } + c1 := c.Clone() + Expect(c1.FTP.Path).To(Equal("path")) + c.FTP.Path = "x" + Expect(c1.FTP.Path).To(Equal("path")) + }) + + It("SFTP", func() { + c := Config{ + SFTP: &SFTPConfig{ + Path: "path", + }, + } + c1 := c.Clone() + Expect(c1.SFTP.Path).To(Equal("path")) + c.SFTP.Path = "x" + Expect(c1.SFTP.Path).To(Equal("path")) + }) + + It("HDFS", func() { + c := Config{ + HDFS: &HDFSConfig{ + Path: "path", + }, + } + c1 := c.Clone() + Expect(c1.HDFS.Path).To(Equal("path")) + c.HDFS.Path = "x" + Expect(c1.HDFS.Path).To(Equal("path")) + }) + + It("Local", func() { + c := Config{ + Local: &LocalConfig{ + Path: "path", + }, + } + c1 := c.Clone() + Expect(c1.Local.Path).To(Equal("path")) + c.Local.Path = "x" + Expect(c1.Local.Path).To(Equal("path")) + }) + }) +}) diff --git a/pkg/source/ftp.go b/pkg/source/ftp.go new file mode 100644 index 00000000..0ce6ed08 --- /dev/null +++ b/pkg/source/ftp.go @@ -0,0 +1,91 @@ +package source + +import ( + "fmt" + "time" + + "github.com/jlaffaye/ftp" +) + +var _ Source = (*ftpSource)(nil) + +type ( + FTPConfig struct { + Host string `yaml:"host,omitempty"` + Port int `yaml:"port,omitempty"` + User string `yaml:"username,omitempty"` + Password string `yaml:"password,omitempty"` + Path string `yaml:"path,omitempty"` + } + + ftpSource struct { + c *Config + conn *ftp.ServerConn + r *ftp.Response + size int64 + } +) + +func newFTPSource(c *Config) Source { + return &ftpSource{ + c: c, + } +} + +func (s *ftpSource) Name() string { + return s.c.FTP.String() +} + +func (s *ftpSource) Open() error { + conn, err := ftp.Dial(fmt.Sprintf("%s:%d", s.c.FTP.Host, s.c.FTP.Port), ftp.DialWithTimeout(5*time.Second)) + if err != nil { + return err + } + + err = conn.Login(s.c.FTP.User, s.c.FTP.Password) + if err != nil { + _ = conn.Quit() + return err + } + + size, err := conn.FileSize(s.c.FTP.Path) + if err != nil { + _ = conn.Quit() + return err + } + + r, err := conn.Retr(s.c.FTP.Path) + if err != nil { + _ = conn.Quit() + return err + } + + s.conn = conn + s.r = r + s.size = size + + return nil +} + +func (s *ftpSource) Config() *Config { + return s.c +} + +func (s *ftpSource) Size() (int64, error) { + return s.size, nil +} + +func (s *ftpSource) Read(p []byte) (int, error) { + return s.r.Read(p) +} + +func (s *ftpSource) Close() error { + defer func() { + _ = s.conn.Quit() + }() + return s.r.Close() +} + +func (c *FTPConfig) String() string { + return fmt.Sprintf("ftp %s:%d %s", c.Host, c.Port, c.Path) +} diff --git a/pkg/source/ftp_test.go b/pkg/source/ftp_test.go new file mode 100644 index 00000000..e0f35455 --- /dev/null +++ b/pkg/source/ftp_test.go @@ -0,0 +1,238 @@ +package source + +import ( + "crypto/tls" + stderrors "errors" + "fmt" + "io" + "net" + "os" + "strconv" + "sync" + + ftpserverlib "github.com/fclairamb/ftpserverlib" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "github.com/spf13/afero" +) + +var _ = Describe("ftpSource", func() { + var ( + host = "127.0.0.1" + port = 0 + user = "user" + password = "Password" + fs = afero.NewMemMapFs() + ftpServerDriver *TestFTPServerDriver + ftpServer *ftpserverlib.FtpServer + wgFTPServer sync.WaitGroup + ) + BeforeEach(func() { + ftpServerDriver = &TestFTPServerDriver{ + User: user, + Password: password, + Settings: &ftpserverlib.Settings{ + ListenAddr: fmt.Sprintf("%s:%d", host, port), + }, + Fs: fs, + } + ftpServer = ftpserverlib.NewFtpServer(ftpServerDriver) + err := ftpServer.Listen() + Expect(err).NotTo(HaveOccurred()) + + _, portStr, err := net.SplitHostPort(ftpServer.Addr()) + Expect(err).NotTo(HaveOccurred()) + port, _ = strconv.Atoi(portStr) + + wgFTPServer.Add(1) + go func() { + defer wgFTPServer.Done() + err = ftpServer.Serve() + Expect(err).NotTo(HaveOccurred()) + }() + }) + AfterEach(func() { + _ = ftpServer.Stop() + wgFTPServer.Wait() + }) + It("successfully", func() { + content := []byte("Hello") + f, err := fs.Create("/file") + Expect(err).NotTo(HaveOccurred()) + _, _ = f.Write(content) + _ = f.Close() + + c := Config{ + FTP: &FTPConfig{ + Host: host, + Port: port, + User: user, + Password: password, + Path: "/file", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ftpSource{})) + + Expect(s.Name()).To(Equal(fmt.Sprintf("ftp 127.0.0.1:%d /file", port))) + + Expect(s.Config()).NotTo(BeNil()) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(sz).To(Equal(int64(len(content)))) + + var p [32]byte + n, err := s.Read(p[:]) + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(len(content))) + Expect(p[:n]).To(Equal(content)) + + for i := 0; i < 2; i++ { + n, err = s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(0)) + } + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("ftp.Dial failed", func() { + c := Config{ + FTP: &FTPConfig{ + Host: host, + Port: 0, + User: user, + Password: password, + Path: "/file", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("Login failed", func() { + c := Config{ + FTP: &FTPConfig{ + Host: host, + Port: port, + User: user, + Password: password + "p", + Path: "/file", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("FileSize failed", func() { + c := Config{ + FTP: &FTPConfig{ + Host: host, + Port: port, + User: user, + Password: password, + Path: "/file-not-exists", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("Retr failed", func() { + c := Config{ + FTP: &FTPConfig{ + Host: host, + Port: port, + User: user, + Password: password, + Path: "/file", + }, + } + + ftpServerDriver.OpenFileFunc = func(_ string, _ int, _ os.FileMode) (afero.File, error) { + return nil, stderrors.New("test error") + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) +}) + +// The following is mock ftp server + +var ( + _ ftpserverlib.MainDriver = (*TestFTPServerDriver)(nil) + _ 
ftpserverlib.ClientDriver = (*TestFTPClientDriver)(nil) +) + +type ( + TestFTPServerDriver struct { + User string + Password string + Settings *ftpserverlib.Settings + Fs afero.Fs + + OpenFileFunc func(name string, flag int, perm os.FileMode) (afero.File, error) + } + TestFTPClientDriver struct { + afero.Fs + serverDriver *TestFTPServerDriver + } +) + +func (d *TestFTPServerDriver) GetSettings() (*ftpserverlib.Settings, error) { + return d.Settings, nil +} + +func (d *TestFTPServerDriver) ClientConnected(_ ftpserverlib.ClientContext) (string, error) { + return "TEST Server", nil +} + +func (d *TestFTPServerDriver) ClientDisconnected(cc ftpserverlib.ClientContext) {} + +func (d *TestFTPServerDriver) AuthUser(_ ftpserverlib.ClientContext, user, pass string) (ftpserverlib.ClientDriver, error) { + if user == d.User && pass == d.Password { + return &TestFTPClientDriver{ + Fs: d.Fs, + serverDriver: d, + }, nil + } + return nil, stderrors.New("bad username or password") +} + +func (d *TestFTPServerDriver) GetTLSConfig() (*tls.Config, error) { + return nil, stderrors.New("TLS is not configured") +} + +func (d *TestFTPClientDriver) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { + if d.serverDriver.OpenFileFunc != nil { + return d.serverDriver.OpenFileFunc(name, flag, perm) + } + return d.Fs.OpenFile(name, flag, perm) +} diff --git a/pkg/source/hdfs.go b/pkg/source/hdfs.go new file mode 100644 index 00000000..a91d0d8d --- /dev/null +++ b/pkg/source/hdfs.go @@ -0,0 +1,87 @@ +package source + +import ( + "fmt" + "strings" + + "github.com/colinmarc/hdfs/v2" + "github.com/colinmarc/hdfs/v2/hadoopconf" +) + +var _ Source = (*hdfsSource)(nil) + +type ( + HDFSConfig struct { + Address string `yaml:"address,omitempty"` + User string `yaml:"user,omitempty"` + Path string `yaml:"path,omitempty"` + } + + hdfsSource struct { + c *Config + cli *hdfs.Client + r *hdfs.FileReader + } +) + +func newHDFSSource(c *Config) Source { + return &hdfsSource{ + c: c, + } +} + +func (s *hdfsSource) Name() string { + return s.c.HDFS.String() +} + +func (s *hdfsSource) Open() error { + // TODO: support kerberos + conf, err := hadoopconf.LoadFromEnvironment() + if err != nil { + return err + } + + options := hdfs.ClientOptionsFromConf(conf) + if s.c.HDFS.Address != "" { + options.Addresses = strings.Split(s.c.HDFS.Address, ",") + } + options.User = s.c.HDFS.User + + cli, err := hdfs.NewClient(options) + if err != nil { + return err + } + + r, err := cli.Open(s.c.HDFS.Path) + if err != nil { + return err + } + + s.cli = cli + s.r = r + + return nil +} + +func (s *hdfsSource) Config() *Config { + return s.c +} + +func (s *hdfsSource) Size() (int64, error) { + return s.r.Stat().Size(), nil +} + +func (s *hdfsSource) Read(p []byte) (int, error) { + return s.r.Read(p) +} + +func (s *hdfsSource) Close() error { + defer func() { + _ = s.cli.Close() + }() + return s.r.Close() +} + +func (c *HDFSConfig) String() string { + return fmt.Sprintf("hdfs %s %s", c.Address, c.Path) +} diff --git a/pkg/source/hdfs_test.go b/pkg/source/hdfs_test.go new file mode 100644 index 00000000..9d9cf3e4 --- /dev/null +++ b/pkg/source/hdfs_test.go @@ -0,0 +1,155 @@ +//go:build linux + +package source + +import ( + stderrors "errors" + "io" + "os" + "testing/fstest" + + "github.com/agiledragon/gomonkey/v2" + "github.com/colinmarc/hdfs/v2" + "github.com/colinmarc/hdfs/v2/hadoopconf" + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("hdfsSource", func() { + var ( + address = "nn1:9000,nn2:9000" + user = "user" + content = []byte("Hello") + patches *gomonkey.Patches + hdfsClient = &hdfs.Client{} + hdfsFileReader = &hdfs.FileReader{} + ) + BeforeEach(func() { + patches = gomonkey.NewPatches() + mockFile, err := fstest.MapFS{ + "file": { + Data: content, + }, + }.Open("file") + Expect(err).NotTo(HaveOccurred()) + + patches.ApplyFunc(hdfs.NewClient, func(opts hdfs.ClientOptions) (*hdfs.Client, error) { + Expect(opts.Addresses).To(Equal([]string{"nn1:9000", "nn2:9000"})) + Expect(opts.User).To(Equal(user)) + return hdfsClient, nil + }) + patches.ApplyMethodReturn(hdfsClient, "Open", hdfsFileReader, nil) + patches.ApplyMethodReturn(hdfsClient, "Close", nil) + + patches.ApplyMethod(hdfsFileReader, "Stat", func() os.FileInfo { + fi, err := mockFile.Stat() + Expect(err).NotTo(HaveOccurred()) + return fi + }) + patches.ApplyMethod(hdfsFileReader, "Read", func(_ *hdfs.FileReader, p []byte) (int, error) { + return mockFile.Read(p) + }) + patches.ApplyMethod(hdfsFileReader, "Close", func(_ *hdfs.FileReader) error { + return mockFile.Close() + }) + }) + AfterEach(func() { + patches.Reset() + }) + It("successfully", func() { + c := Config{ + HDFS: &HDFSConfig{ + Address: address, + User: user, + Path: "file", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&hdfsSource{})) + + Expect(s.Name()).To(Equal("hdfs nn1:9000,nn2:9000 file")) + + Expect(s.Config()).NotTo(BeNil()) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(sz).To(Equal(int64(len(content)))) + + var p [32]byte + n, err := s.Read(p[:]) + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(len(content))) + Expect(p[:n]).To(Equal(content)) + + for i := 0; i < 2; i++ { + n, err = s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(0)) + } + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("LoadFromEnvironment failed", func() { + c := Config{ + HDFS: &HDFSConfig{ + Address: address, + User: user, + Path: "file", + }, + } + + patches.ApplyFuncReturn(hadoopconf.LoadFromEnvironment, hadoopconf.HadoopConf(nil), stderrors.New("test error")) + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&hdfsSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("hdfs.NewClient failed", func() { + c := Config{ + HDFS: &HDFSConfig{ + Address: address, + User: user, + Path: "file", + }, + } + + patches.ApplyFuncReturn(hdfs.NewClient, nil, stderrors.New("test error")) + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&hdfsSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("Open failed", func() { + c := Config{ + HDFS: &HDFSConfig{ + Address: address, + User: user, + Path: "file", + }, + } + + patches.ApplyMethodReturn(hdfsClient, "Open", nil, stderrors.New("test error")) + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&hdfsSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) +}) diff --git a/pkg/source/local.go b/pkg/source/local.go new file mode 100644 index 00000000..22d89936 --- /dev/null +++ b/pkg/source/local.go @@ -0,0 +1,61 @@ +package source + +import ( + "fmt" + "os" +) + +var _ Source = (*localSource)(nil) + +type ( + LocalConfig struct { + Path string `yaml:"path,omitempty"` + } + + 
localSource struct { + c *Config + f *os.File + } +) + +func newLocalSource(c *Config) Source { + return &localSource{ + c: c, + } +} + +func (s *localSource) Name() string { + return s.c.Local.String() +} + +func (s *localSource) Open() error { + f, err := os.Open(s.c.Local.Path) + if err != nil { + return err + } + s.f = f + return nil +} +func (s *localSource) Config() *Config { + return s.c +} + +func (s *localSource) Size() (int64, error) { + fi, err := s.f.Stat() + if err != nil { + return 0, err + } + return fi.Size(), nil +} + +func (s *localSource) Read(p []byte) (int, error) { + return s.f.Read(p) +} + +func (s *localSource) Close() error { + return s.f.Close() +} + +func (c *LocalConfig) String() string { + return fmt.Sprintf("local %s", c.Path) +} diff --git a/pkg/source/local_test.go b/pkg/source/local_test.go new file mode 100644 index 00000000..45c1dd77 --- /dev/null +++ b/pkg/source/local_test.go @@ -0,0 +1,64 @@ +package source + +import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("localSource", func() { + It("exists", func() { + s := newLocalSource(&Config{ + Local: &LocalConfig{ + Path: "testdata/local.txt", + }, + }) + + Expect(s.Name()).To(Equal("local testdata/local.txt")) + + err := s.Open() + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + + Expect(s.Config()).NotTo(BeNil()) + + nBytes, err := s.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(nBytes).To(Equal(int64(6))) + + var buf [1024]byte + n, err := s.Read(buf[:]) + Expect(err).NotTo(HaveOccurred()) + Expect(n).To(Equal(6)) + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("not exists", func() { + s := newLocalSource(&Config{ + Local: &LocalConfig{ + Path: "testdata/not-exists.txt", + }, + }) + err := s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("get size failed", func() { + s := newLocalSource(&Config{ + Local: &LocalConfig{ + Path: "testdata/local.txt", + }, + }) + err := s.Open() + Expect(err).NotTo(HaveOccurred()) + Expect(s).NotTo(BeNil()) + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + + nBytes, err := s.Size() + Expect(err).To(HaveOccurred()) + Expect(nBytes).To(Equal(int64(0))) + }) +}) diff --git a/pkg/source/oss.go b/pkg/source/oss.go new file mode 100644 index 00000000..8d177cab --- /dev/null +++ b/pkg/source/oss.go @@ -0,0 +1,91 @@ +package source + +import ( + "fmt" + "io" + "strconv" + "strings" + + "github.com/aliyun/aliyun-oss-go-sdk/oss" +) + +var _ Source = (*ossSource)(nil) + +type ( + OSSConfig struct { + Endpoint string `yaml:"endpoint,omitempty"` + AccessKey string `yaml:"accessKey,omitempty"` + SecretKey string `yaml:"secretKey,omitempty"` + Bucket string `yaml:"bucket,omitempty"` + Key string `yaml:"key,omitempty"` + } + + ossSource struct { + c *Config + cli *oss.Client + bucket *oss.Bucket + r io.ReadCloser + } +) + +func newOSSSource(c *Config) Source { + return &ossSource{ + c: c, + } +} + +func (s *ossSource) Name() string { + return s.c.OSS.String() +} + +func (s *ossSource) Open() error { + cli, err := oss.New(s.c.OSS.Endpoint, s.c.OSS.AccessKey, s.c.OSS.SecretKey) + if err != nil { + return err + } + + bucket, err := cli.Bucket(s.c.OSS.Bucket) + if err != nil { + return err + } + + r, err := bucket.GetObject(strings.TrimLeft(s.c.OSS.Key, "/")) + if err != nil { + return err + } + + s.cli = cli + s.bucket = bucket + s.r = r + + return nil +} + +func (s *ossSource) Config() *Config { + return s.c +} + +func (s *ossSource) Size() (int64, error) { + meta, err := 
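+ // Size is served by a separate HEAD-style request (GetObjectMeta) rather
+ // than by the GetObject response, so it stays valid whether or not Read
+ // has been called; Content-Length arrives as a header string and is
+ // parsed into an int64 below.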
s.bucket.GetObjectMeta(strings.TrimLeft(s.c.OSS.Key, "/")) + if err != nil { + return 0, err + } + contentLength := meta.Get("Content-Length") + size, err := strconv.ParseInt(contentLength, 10, 64) + if err != nil { + return 0, err + } + return size, nil +} + +func (s *ossSource) Read(p []byte) (int, error) { + return s.r.Read(p) +} + +func (s *ossSource) Close() error { + return s.r.Close() +} + +func (c *OSSConfig) String() string { + return fmt.Sprintf("oss %s %s/%s", c.Endpoint, c.Bucket, c.Key) +} diff --git a/pkg/source/oss_test.go b/pkg/source/oss_test.go new file mode 100644 index 00000000..fead3c57 --- /dev/null +++ b/pkg/source/oss_test.go @@ -0,0 +1,207 @@ +package source + +import ( + "fmt" + "io" + "net/http" + "net/http/httptest" + "strconv" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("ossSource", func() { + var ( + httpMux *http.ServeMux + httpServer *httptest.Server + ) + BeforeEach(func() { + httpMux = http.NewServeMux() + httpServer = httptest.NewServer(httpMux) + }) + AfterEach(func() { + httpServer.Close() + }) + It("successfully", func() { + content := []byte("Hello") + httpMux.HandleFunc("/bucket/key", func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + _, _ = w.Write(content) + case http.MethodHead: + w.Header().Set("Content-Length", strconv.Itoa(len(content))) + default: + Panic() + } + }) + + c := Config{ + OSS: &OSSConfig{ + Endpoint: httpServer.URL, + AccessKey: "accessKey", + SecretKey: "secretKey", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + + Expect(s.Name()).To(Equal(fmt.Sprintf("oss %s bucket/key", httpServer.URL))) + + Expect(s.Config()).NotTo(BeNil()) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(sz).To(Equal(int64(len(content)))) + + var p [32]byte + n, err := s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(len(content))) + Expect(p[:n]).To(Equal(content)) + + for i := 0; i < 2; i++ { + n, err = s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(0)) + } + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("oss.New failed", func() { + c := Config{ + OSS: &OSSConfig{ + Endpoint: "\t", + AccessKey: "accessKey", + SecretKey: "secretKey", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("Bucket failed", func() { + c := Config{ + OSS: &OSSConfig{ + Endpoint: httpServer.URL, + AccessKey: "accessKey", + SecretKey: "secretKey", + Bucket: "b", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("GetObject failed", func() { + httpMux.HandleFunc("/bucket/key", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusMethodNotAllowed) + }) + c := Config{ + OSS: &OSSConfig{ + Endpoint: httpServer.URL, + AccessKey: "accessKey", + SecretKey: "secretKey", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("Size failed", func() { + content := []byte("Hello") + 
httpMux.HandleFunc("/bucket/key", func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + _, _ = w.Write(content) + case http.MethodHead: + w.WriteHeader(http.StatusMethodNotAllowed) + default: + Panic() + } + }) + c := Config{ + OSS: &OSSConfig{ + Endpoint: httpServer.URL, + AccessKey: "accessKey", + SecretKey: "secretKey", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).To(HaveOccurred()) + Expect(sz).To(Equal(int64(0))) + }) + + It("Size failed", func() { + content := []byte("Hello") + httpMux.HandleFunc("/bucket/key", func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + _, _ = w.Write(content) + case http.MethodHead: + w.WriteHeader(http.StatusOK) + default: + Panic() + } + }) + c := Config{ + OSS: &OSSConfig{ + Endpoint: httpServer.URL, + AccessKey: "accessKey", + SecretKey: "secretKey", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).To(HaveOccurred()) + Expect(sz).To(Equal(int64(0))) + }) +}) diff --git a/pkg/source/s3.go b/pkg/source/s3.go new file mode 100644 index 00000000..a8abdabe --- /dev/null +++ b/pkg/source/s3.go @@ -0,0 +1,91 @@ +package source + +import ( + "fmt" + "strings" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/credentials" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/s3" +) + +var _ Source = (*s3Source)(nil) + +type ( + S3Config struct { + Endpoint string `yaml:"endpoint,omitempty"` + Region string `yaml:"region,omitempty"` + AccessKey string `yaml:"accessKey,omitempty"` + SecretKey string `yaml:"secretKey,omitempty"` + Token string `yaml:"token,omitempty"` + Bucket string `yaml:"bucket,omitempty"` + Key string `yaml:"key,omitempty"` + } + + s3Source struct { + c *Config + obj *s3.GetObjectOutput + } +) + +func newS3Source(c *Config) Source { + return &s3Source{ + c: c, + } +} + +func (s *s3Source) Name() string { + return s.c.S3.String() +} + +func (s *s3Source) Open() error { + awsConfig := &aws.Config{ + Region: aws.String(s.c.S3.Region), + Endpoint: aws.String(s.c.S3.Endpoint), + S3ForcePathStyle: aws.Bool(true), + } + + if s.c.S3.AccessKey != "" || s.c.S3.SecretKey != "" || s.c.S3.Token != "" { + awsConfig.Credentials = credentials.NewStaticCredentials(s.c.S3.AccessKey, s.c.S3.SecretKey, s.c.S3.Token) + } + + sess, err := session.NewSession(awsConfig) + if err != nil { + return err + } + + svc := s3.New(sess) + + obj, err := svc.GetObject(&s3.GetObjectInput{ + Bucket: aws.String(s.c.S3.Bucket), + Key: aws.String(strings.TrimLeft(s.c.S3.Key, "/")), + }) + if err != nil { + return err + } + + s.obj = obj + + return nil +} + +func (s *s3Source) Config() *Config { + return s.c +} + +func (s *s3Source) Size() (int64, error) { + return *s.obj.ContentLength, nil +} + +func (s *s3Source) Read(p []byte) (int, error) { + return s.obj.Body.Read(p) +} + +func (s *s3Source) Close() error { + return s.obj.Body.Close() +} + +func (c *S3Config) String() string { + return fmt.Sprintf("s3 %s:%s %s/%s", c.Region, c.Endpoint, c.Bucket, c.Key) +} diff --git a/pkg/source/s3_test.go b/pkg/source/s3_test.go new file mode 100644 index 00000000..7ccf49ca --- 
/dev/null +++ b/pkg/source/s3_test.go @@ -0,0 +1,105 @@ +package source + +import ( + "fmt" + "io" + "net/http" + "net/http/httptest" + "strconv" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("s3Source", func() { + var ( + httpMux *http.ServeMux + httpServer *httptest.Server + ) + BeforeEach(func() { + httpMux = http.NewServeMux() + httpServer = httptest.NewServer(httpMux) + }) + AfterEach(func() { + httpServer.Close() + }) + It("successfully", func() { + content := []byte("Hello") + httpMux.HandleFunc("/bucket/key", func(w http.ResponseWriter, r *http.Request) { + switch r.Method { + case http.MethodGet: + _, _ = w.Write(content) + case http.MethodHead: + w.Header().Set("Content-Length", strconv.Itoa(len(content))) + default: + Panic() + } + }) + + c := Config{ + S3: &S3Config{ + Endpoint: httpServer.URL, + Region: "us-west-2", + AccessKey: "accessKey", + SecretKey: "secretKey", + Token: "token", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&s3Source{})) + + Expect(s.Name()).To(Equal(fmt.Sprintf("s3 us-west-2:%s bucket/key", httpServer.URL))) + + Expect(s.Config()).NotTo(BeNil()) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(sz).To(Equal(int64(len(content)))) + + var p [32]byte + n, err := s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(len(content))) + Expect(p[:n]).To(Equal(content)) + + for i := 0; i < 2; i++ { + n, err = s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(0)) + } + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + }) + + It("GetObject failed", func() { + httpMux.HandleFunc("/bucket/key", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusMethodNotAllowed) + }) + c := Config{ + S3: &S3Config{ + Endpoint: httpServer.URL, + Region: "us-west-2", + AccessKey: "accessKey", + SecretKey: "secretKey", + Token: "token", + Bucket: "bucket", + Key: "key", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&s3Source{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) +}) diff --git a/pkg/source/sftp.go b/pkg/source/sftp.go new file mode 100644 index 00000000..ae7d642d --- /dev/null +++ b/pkg/source/sftp.go @@ -0,0 +1,133 @@ +package source + +import ( + "fmt" + "os" + "time" + + "github.com/pkg/sftp" + "golang.org/x/crypto/ssh" +) + +var _ Source = (*sftpSource)(nil) + +type ( + SFTPConfig struct { + Host string `yaml:"host,omitempty"` + Port int `yaml:"port,omitempty"` + User string `yaml:"user,omitempty"` + Password string `yaml:"password,omitempty"` + KeyFile string `yaml:"keyFile,omitempty"` + KeyData string `yaml:"keyData,omitempty"` + Passphrase string `yaml:"passphrase,omitempty"` + Path string `yaml:"path,omitempty"` + } + + sftpSource struct { + c *Config + sshCli *ssh.Client + sftpCli *sftp.Client + f *sftp.File + } +) + +func newSFTPSource(c *Config) Source { + return &sftpSource{ + c: c, + } +} + +func (s *sftpSource) Name() string { + return s.c.SFTP.String() +} + +func (s *sftpSource) Open() error { + keyData := s.c.SFTP.KeyData + if keyData == "" && s.c.SFTP.KeyFile != "" { + keyDataBytes, err := os.ReadFile(s.c.SFTP.KeyFile) + if err != nil { + return err + } + keyData = string(keyDataBytes) + } + + authMethod, err := getSSHAuthMethod(s.c.SFTP.Password, keyData, s.c.SFTP.Passphrase) + if err != nil { + return err + } + + sshCli, err := 
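+ // The dial below uses a fixed 5-second timeout and InsecureIgnoreHostKey,
+ // i.e. the server's host key is not verified. That keeps the importer easy
+ // to run against arbitrary hosts, at the cost of MITM protection.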
ssh.Dial("tcp", fmt.Sprintf("%s:%d", s.c.SFTP.Host, s.c.SFTP.Port), &ssh.ClientConfig{ + User: s.c.SFTP.User, + Auth: []ssh.AuthMethod{authMethod}, + Timeout: time.Second * 5, + HostKeyCallback: ssh.InsecureIgnoreHostKey(), //nolint: gosec + }) + if err != nil { + return err + } + + sftpCli, err := sftp.NewClient(sshCli) + if err != nil { + _ = sshCli.Close() + return err + } + + f, err := sftpCli.Open(s.c.SFTP.Path) + if err != nil { + _ = sftpCli.Close() + _ = sshCli.Close() + return err + } + + s.sshCli = sshCli + s.sftpCli = sftpCli + s.f = f + + return nil +} + +func (s *sftpSource) Config() *Config { + return s.c +} + +func (s *sftpSource) Size() (int64, error) { + fi, err := s.f.Stat() + if err != nil { + return 0, err + } + return fi.Size(), nil +} + +func (s *sftpSource) Read(p []byte) (int, error) { + return s.f.Read(p) +} + +func (s *sftpSource) Close() error { + defer func() { + _ = s.sftpCli.Close() + _ = s.sshCli.Close() + }() + return s.f.Close() +} + +func getSSHAuthMethod(password, keyData, passphrase string) (ssh.AuthMethod, error) { + if keyData != "" { + key, err := getSSHSigner(keyData, passphrase) + if err != nil { + return nil, err + } + return ssh.PublicKeys(key), nil + } + return ssh.Password(password), nil +} + +func getSSHSigner(keyData, passphrase string) (ssh.Signer, error) { + if passphrase != "" { + return ssh.ParsePrivateKeyWithPassphrase([]byte(keyData), []byte(passphrase)) + } + return ssh.ParsePrivateKey([]byte(keyData)) +} + +func (c *SFTPConfig) String() string { + return fmt.Sprintf("sftp %s:%d %s", c.Host, c.Port, c.Path) +} diff --git a/pkg/source/sftp_test.go b/pkg/source/sftp_test.go new file mode 100644 index 00000000..50867000 --- /dev/null +++ b/pkg/source/sftp_test.go @@ -0,0 +1,435 @@ +package source + +import ( + "bytes" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + stderrors "errors" + "fmt" + "io" + "log" + "net" + "os" + "path/filepath" + "strconv" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" + "github.com/pkg/sftp" + "golang.org/x/crypto/ssh" +) + +var _ = Describe("sftpSource", func() { + var ( + tmpdir string + host = "127.0.0.1" + port = 0 + user = "user" + password = "password" + keyFile = "" + keyData = "" + keyFilePassphrase = "" + keyDataPassphrase = "" + passphrase = "ssh passphrase" + sftpServer *TestSFTPServer + ) + BeforeEach(func() { + var err error + tmpdir, err = os.MkdirTemp("", "test") + Expect(err).NotTo(HaveOccurred()) + + // Generate a new RSA private key + privateKey, err := rsa.GenerateKey(rand.Reader, 2048) + Expect(err).NotTo(HaveOccurred()) + privateKeyPEM := &pem.Block{ + Type: "RSA PRIVATE KEY", + Bytes: x509.MarshalPKCS1PrivateKey(privateKey), + } + + keyData = string(pem.EncodeToMemory(privateKeyPEM)) + keyFile = filepath.Join(tmpdir, "id_rsa") + signer, err := getSSHSigner(keyData, "") + Expect(err).NotTo(HaveOccurred()) + err = os.WriteFile(keyFile, []byte(keyData), 0600) + Expect(err).NotTo(HaveOccurred()) + + encryptedPEM, err := x509.EncryptPEMBlock(rand.Reader, privateKeyPEM.Type, privateKeyPEM.Bytes, []byte(passphrase), x509.PEMCipherAES256) //nolint:staticcheck + Expect(err).NotTo(HaveOccurred()) + keyDataPassphrase = string(pem.EncodeToMemory(encryptedPEM)) + keyFilePassphrase = filepath.Join(tmpdir, "id_rsa_passphrase") + signerPassphrase, err := getSSHSigner(keyDataPassphrase, passphrase) + Expect(err).NotTo(HaveOccurred()) + err = os.WriteFile(keyFilePassphrase, []byte(keyDataPassphrase), 0600) + Expect(err).NotTo(HaveOccurred()) + + sftpServer = &TestSFTPServer{ + ListenAddress: fmt.Sprintf("%s:%d", host, port), + User: user, + Password: password, + PrivateKeys: []ssh.Signer{signer, signerPassphrase}, + } + + err = sftpServer.Start() + Expect(err).NotTo(HaveOccurred()) + + _, portStr, err := net.SplitHostPort(sftpServer.Addr()) + Expect(err).NotTo(HaveOccurred()) + port, _ = strconv.Atoi(portStr) + }) + AfterEach(func() { + var err error + sftpServer.Stop() + err = os.RemoveAll(tmpdir) + Expect(err).NotTo(HaveOccurred()) + }) + + It("successfully password", func() { + content := []byte("Hello") + file := filepath.Join(tmpdir, "file") + err := os.WriteFile(file, content, 0600) + Expect(err).NotTo(HaveOccurred()) + + for _, c := range []Config{ + { // password + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + Password: password, + Path: file, + }, + }, + { // key file + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFile, + Path: file, + }, + }, + { // key file with passphrase + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFilePassphrase, + Passphrase: passphrase, + Path: file, + }, + }, + } { + c := c + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + Expect(s.Name()).To(Equal(fmt.Sprintf("sftp 127.0.0.1:%d %s", port, file))) + + Expect(s.Config()).NotTo(BeNil()) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sz, err := s.Size() + Expect(err).NotTo(HaveOccurred()) + Expect(sz).To(Equal(int64(len(content)))) + + var p [32]byte + n, err := s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(len(content))) + Expect(p[:n]).To(Equal(content)) + + for i := 0; i < 2; i++ { + n, err = s.Read(p[:]) + Expect(err).To(Equal(io.EOF)) + Expect(n).To(Equal(0)) + } + + err = s.Close() + Expect(err).NotTo(HaveOccurred()) + } + }) + + It("get size failed", func() { + content := []byte("Hello") + file := filepath.Join(tmpdir, "file") + err := os.WriteFile(file, content, 
0600) + Expect(err).NotTo(HaveOccurred()) + + c := Config{ + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFilePassphrase, + Passphrase: passphrase, + Path: file, + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + Expect(s.Name()).To(Equal(fmt.Sprintf("sftp 127.0.0.1:%d %s", port, file))) + + Expect(s.Config()).NotTo(BeNil()) + + err = s.Open() + Expect(err).NotTo(HaveOccurred()) + + sftpServer.Stop() + + sz, err := s.Size() + Expect(err).To(HaveOccurred()) + Expect(sz).To(Equal(int64(0))) + }) + + It("read key file failed", func() { + c := Config{ + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFilePassphrase + "x", + Passphrase: passphrase, + Path: "", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("getSSHAuthMethod failed", func() { + c := Config{ + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFilePassphrase, + Passphrase: passphrase + "x", + Path: "", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("ssh.Dial failed", func() { + c := Config{ + SFTP: &SFTPConfig{ + Host: host, + Port: 0, + User: user, + KeyFile: keyFilePassphrase, + Passphrase: passphrase, + Path: "", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("sftp.NewClient failed", func() { + sftpServer.DisableSubsystem = true + + c := Config{ + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFilePassphrase, + Passphrase: passphrase, + Path: "", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) + + It("Open file failed", func() { + c := Config{ + SFTP: &SFTPConfig{ + Host: host, + Port: port, + User: user, + KeyFile: keyFilePassphrase, + Passphrase: passphrase, + Path: "x", + }, + } + + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + + err = s.Open() + Expect(err).To(HaveOccurred()) + }) +}) + +// The following is mock sftp server + +type ( + TestSFTPServer struct { + ListenAddress string + User string + Password string + PrivateKeys []ssh.Signer + DisableSubsystem bool + + serverConfig *ssh.ServerConfig + listener net.Listener + conns []net.Conn + } +) + +func (s *TestSFTPServer) Start() error { + serverConfig := &ssh.ServerConfig{ + PasswordCallback: func(c ssh.ConnMetadata, pass []byte) (*ssh.Permissions, error) { + if c.User() == s.User && string(pass) == s.Password { + return nil, nil + } + return nil, stderrors.New("bad username or password") + }, + PublicKeyCallback: func(conn ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) { + if conn.User() == s.User { + for _, privateKey := range s.PrivateKeys { + if bytes.Equal(key.Marshal(), privateKey.PublicKey().Marshal()) { + return nil, nil + } + } + } + return nil, fmt.Errorf("pubkey for %q not acceptable", conn.User()) + }, + } + + for _, privateKey := range s.PrivateKeys { + serverConfig.AddHostKey(privateKey) + } + + s.serverConfig = serverConfig + + if err := s.listen(); err != nil { + return err 
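+ // The accept loop below is only started once the listener is up, so a
+ // failed listen needs no cleanup here.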
+ } + + go s.acceptLoop() + + return nil +} + +func (s *TestSFTPServer) Addr() string { + return s.listener.Addr().String() +} + +func (s *TestSFTPServer) Stop() { + s.listener.Close() + for _, conn := range s.conns { + conn.Close() + } + s.conns = nil +} + +func (s *TestSFTPServer) listen() error { + listener, err := net.Listen("tcp", s.ListenAddress) + if err != nil { + return err + } + s.listener = listener + return nil +} + +func (s *TestSFTPServer) acceptLoop() { + for { + conn, err := s.listener.Accept() + if err != nil { + log.Printf("accept failed %v", err) + return + } + s.conns = append(s.conns, conn) + go s.handlerConn(conn) + } +} + +func (s *TestSFTPServer) handlerConn(conn net.Conn) { + defer conn.Close() + serverConn, chans, reqs, err := ssh.NewServerConn(conn, s.serverConfig) + if err != nil { + log.Printf("create ssh session conn failed %v", err) + return + } + + defer serverConn.Close() + go ssh.DiscardRequests(reqs) + for newChannel := range chans { + go s.handlerNewChannel(newChannel) + } +} + +func (s *TestSFTPServer) handlerNewChannel(newChannel ssh.NewChannel) { + if newChannel.ChannelType() != "session" { + newChannel.Reject(ssh.UnknownChannelType, "unknown channel type") + log.Printf("unknown channel type: %s", newChannel.ChannelType()) + return + } + + channel, requests, err := newChannel.Accept() + if err != nil { + log.Printf("accept channel %v", err) + return + } + defer channel.Close() + + go func(in <-chan *ssh.Request) { + for req := range in { + ok := false + switch req.Type { //nolint:gocritic + // Here we handle only the "subsystem" request. + case "subsystem": + if !s.DisableSubsystem && string(req.Payload[4:]) == "sftp" { + ok = true + } + } + req.Reply(ok, nil) + } + }(requests) + + server, err := sftp.NewServer( + channel, + sftp.ReadOnly(), + ) + if err != nil { + log.Printf("create sftp server failed %v", err) + return + } + defer server.Close() + + if err = server.Serve(); err != io.EOF { + log.Printf("sftp server failed %v", err) + } +} diff --git a/pkg/source/source.go b/pkg/source/source.go new file mode 100644 index 00000000..2152c73c --- /dev/null +++ b/pkg/source/source.go @@ -0,0 +1,39 @@ +//go:generate mockgen -source=source.go -destination source_mock.go -package source Source,Sizer +package source + +import ( + "io" +) + +type ( + Source interface { + Config() *Config + Name() string + Open() error + Sizer + io.Reader + io.Closer + } + + Sizer interface { + Size() (int64, error) + } +) + +func New(c *Config) (Source, error) { + // TODO: support blob and so on + switch { + case c.S3 != nil: + return newS3Source(c), nil + case c.OSS != nil: + return newOSSSource(c), nil + case c.FTP != nil: + return newFTPSource(c), nil + case c.SFTP != nil: + return newSFTPSource(c), nil + case c.HDFS != nil: + return newHDFSSource(c), nil + default: + return newLocalSource(c), nil + } +} diff --git a/pkg/source/source_mock.go b/pkg/source/source_mock.go new file mode 100644 index 00000000..e8cd4c65 --- /dev/null +++ b/pkg/source/source_mock.go @@ -0,0 +1,158 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: source.go + +// Package source is a generated GoMock package. +package source + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockSource is a mock of Source interface. +type MockSource struct { + ctrl *gomock.Controller + recorder *MockSourceMockRecorder +} + +// MockSourceMockRecorder is the mock recorder for MockSource. 
+type MockSourceMockRecorder struct { + mock *MockSource +} + +// NewMockSource creates a new mock instance. +func NewMockSource(ctrl *gomock.Controller) *MockSource { + mock := &MockSource{ctrl: ctrl} + mock.recorder = &MockSourceMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockSource) EXPECT() *MockSourceMockRecorder { + return m.recorder +} + +// Close mocks base method. +func (m *MockSource) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockSourceMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockSource)(nil).Close)) +} + +// Config mocks base method. +func (m *MockSource) Config() *Config { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Config") + ret0, _ := ret[0].(*Config) + return ret0 +} + +// Config indicates an expected call of Config. +func (mr *MockSourceMockRecorder) Config() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Config", reflect.TypeOf((*MockSource)(nil).Config)) +} + +// Name mocks base method. +func (m *MockSource) Name() string { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Name") + ret0, _ := ret[0].(string) + return ret0 +} + +// Name indicates an expected call of Name. +func (mr *MockSourceMockRecorder) Name() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Name", reflect.TypeOf((*MockSource)(nil).Name)) +} + +// Open mocks base method. +func (m *MockSource) Open() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Open") + ret0, _ := ret[0].(error) + return ret0 +} + +// Open indicates an expected call of Open. +func (mr *MockSourceMockRecorder) Open() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*MockSource)(nil).Open)) +} + +// Read mocks base method. +func (m *MockSource) Read(p []byte) (int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Read", p) + ret0, _ := ret[0].(int) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Read indicates an expected call of Read. +func (mr *MockSourceMockRecorder) Read(p interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Read", reflect.TypeOf((*MockSource)(nil).Read), p) +} + +// Size mocks base method. +func (m *MockSource) Size() (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Size") + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Size indicates an expected call of Size. +func (mr *MockSourceMockRecorder) Size() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Size", reflect.TypeOf((*MockSource)(nil).Size)) +} + +// MockSizer is a mock of Sizer interface. +type MockSizer struct { + ctrl *gomock.Controller + recorder *MockSizerMockRecorder +} + +// MockSizerMockRecorder is the mock recorder for MockSizer. +type MockSizerMockRecorder struct { + mock *MockSizer +} + +// NewMockSizer creates a new mock instance. +func NewMockSizer(ctrl *gomock.Controller) *MockSizer { + mock := &MockSizer{ctrl: ctrl} + mock.recorder = &MockSizerMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. 
+func (m *MockSizer) EXPECT() *MockSizerMockRecorder { + return m.recorder +} + +// Size mocks base method. +func (m *MockSizer) Size() (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Size") + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Size indicates an expected call of Size. +func (mr *MockSizerMockRecorder) Size() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Size", reflect.TypeOf((*MockSizer)(nil).Size)) +} diff --git a/pkg/source/source_suite_test.go b/pkg/source/source_suite_test.go new file mode 100644 index 00000000..e41bd9b5 --- /dev/null +++ b/pkg/source/source_suite_test.go @@ -0,0 +1,13 @@ +package source + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestSource(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg source Suite") +} diff --git a/pkg/source/source_test.go b/pkg/source/source_test.go new file mode 100644 index 00000000..d84550c3 --- /dev/null +++ b/pkg/source/source_test.go @@ -0,0 +1,74 @@ +package source + +import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Source", func() { + It("S3", func() { + c := Config{ + S3: &S3Config{ + Key: "key", + }, + } + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&s3Source{})) + }) + + It("OSS", func() { + c := Config{ + OSS: &OSSConfig{ + Key: "key", + }, + } + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ossSource{})) + }) + + It("FTP", func() { + c := Config{ + FTP: &FTPConfig{ + Path: "path", + }, + } + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&ftpSource{})) + }) + + It("SFTP", func() { + c := Config{ + SFTP: &SFTPConfig{ + Path: "path", + }, + } + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&sftpSource{})) + }) + + It("HDFS", func() { + c := Config{ + HDFS: &HDFSConfig{ + Path: "path", + }, + } + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&hdfsSource{})) + }) + + It("Local", func() { + c := Config{ + Local: &LocalConfig{ + Path: "path", + }, + } + s, err := New(&c) + Expect(err).NotTo(HaveOccurred()) + Expect(s).To(BeAssignableToTypeOf(&localSource{})) + }) +}) diff --git a/pkg/source/testdata/local.txt b/pkg/source/testdata/local.txt new file mode 100644 index 00000000..e965047a --- /dev/null +++ b/pkg/source/testdata/local.txt @@ -0,0 +1 @@ +Hello diff --git a/pkg/spec/base/builder.go b/pkg/spec/base/builder.go new file mode 100644 index 00000000..f326c638 --- /dev/null +++ b/pkg/spec/base/builder.go @@ -0,0 +1,15 @@ +//go:generate mockgen -source=builder.go -destination builder_mock.go -package specbase StatementBuilder +package specbase + +type ( + // StatementBuilder is the interface to build statement + StatementBuilder interface { + Build(records ...Record) (string, error) + } + + StatementBuilderFunc func(records ...Record) (string, error) +) + +func (f StatementBuilderFunc) Build(records ...Record) (string, error) { + return f(records...) +} diff --git a/pkg/spec/base/builder_mock.go b/pkg/spec/base/builder_mock.go new file mode 100644 index 00000000..5bb51fa7 --- /dev/null +++ b/pkg/spec/base/builder_mock.go @@ -0,0 +1,53 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: builder.go + +// Package specbase is a generated GoMock package. 
+package specbase + +import ( + reflect "reflect" + + gomock "github.com/golang/mock/gomock" +) + +// MockStatementBuilder is a mock of StatementBuilder interface. +type MockStatementBuilder struct { + ctrl *gomock.Controller + recorder *MockStatementBuilderMockRecorder +} + +// MockStatementBuilderMockRecorder is the mock recorder for MockStatementBuilder. +type MockStatementBuilderMockRecorder struct { + mock *MockStatementBuilder +} + +// NewMockStatementBuilder creates a new mock instance. +func NewMockStatementBuilder(ctrl *gomock.Controller) *MockStatementBuilder { + mock := &MockStatementBuilder{ctrl: ctrl} + mock.recorder = &MockStatementBuilderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockStatementBuilder) EXPECT() *MockStatementBuilderMockRecorder { + return m.recorder +} + +// Build mocks base method. +func (m *MockStatementBuilder) Build(records ...Record) (string, error) { + m.ctrl.T.Helper() + varargs := []interface{}{} + for _, a := range records { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Build", varargs...) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Build indicates an expected call of Build. +func (mr *MockStatementBuilderMockRecorder) Build(records ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Build", reflect.TypeOf((*MockStatementBuilder)(nil).Build), records...) +} diff --git a/pkg/spec/base/builder_test.go b/pkg/spec/base/builder_test.go new file mode 100644 index 00000000..fe3b5279 --- /dev/null +++ b/pkg/spec/base/builder_test.go @@ -0,0 +1,17 @@ +package specbase + +import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("StatementBuilderFunc", func() { + It("", func() { + var b StatementBuilder = StatementBuilderFunc(func(records ...Record) (string, error) { + return "test statement", nil + }) + statement, err := b.Build() + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("test statement")) + }) +}) diff --git a/pkg/spec/base/record.go b/pkg/spec/base/record.go new file mode 100644 index 00000000..5dd871f6 --- /dev/null +++ b/pkg/spec/base/record.go @@ -0,0 +1,6 @@ +package specbase + +type ( + Record []string + Records []Record +) diff --git a/pkg/spec/base/spec.go b/pkg/spec/base/spec.go new file mode 100644 index 00000000..3bd77b9d --- /dev/null +++ b/pkg/spec/base/spec.go @@ -0,0 +1 @@ +package specbase diff --git a/pkg/spec/base/spec_suite_test.go b/pkg/spec/base/spec_suite_test.go new file mode 100644 index 00000000..d2d0036d --- /dev/null +++ b/pkg/spec/base/spec_suite_test.go @@ -0,0 +1,13 @@ +package specbase + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func TestSpec(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg spec base Suite") +} diff --git a/pkg/spec/spec.go b/pkg/spec/spec.go new file mode 100644 index 00000000..d4ed4c71 --- /dev/null +++ b/pkg/spec/spec.go @@ -0,0 +1,10 @@ +package spec + +import specbase "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/base" + +type ( + Record = specbase.Record + Records = specbase.Records + StatementBuilder = specbase.StatementBuilder + StatementBuilderFunc = specbase.StatementBuilderFunc +) diff --git a/pkg/spec/v3/edge.go b/pkg/spec/v3/edge.go new file mode 100644 index 00000000..58da0454 --- /dev/null +++ b/pkg/spec/v3/edge.go @@ -0,0 +1,278 @@ +package specv3 + +import ( + "fmt" + "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/bytebufferpool" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" +) + +type ( + Edge struct { + Name string `yaml:"name"` + Src *EdgeNodeRef `yaml:"src"` + Dst *EdgeNodeRef `yaml:"dst"` + Rank *Rank `yaml:"rank"` + Props Props `yaml:"props,omitempty"` + + IgnoreExistedIndex *bool `yaml:"ignoreExistedIndex,omitempty"` + + fnInsertStatement func(records ...Record) (string, error) + insertPrefix string // "INSERT EDGE name(prop_name, ..., prop_name) VALUES " + } + + EdgeNodeRef struct { + Name string `yaml:"-"` + ID *NodeID `yaml:"id"` + } + + Edges []*Edge + + EdgeOption func(*Edge) +) + +func NewEdge(name string, opts ...EdgeOption) *Edge { + e := &Edge{ + Name: name, + } + e.Options(opts...) + + return e +} + +func WithEdgeSrc(src *EdgeNodeRef) EdgeOption { + return func(e *Edge) { + e.Src = src + } +} + +func WithEdgeDst(dst *EdgeNodeRef) EdgeOption { + return func(e *Edge) { + e.Dst = dst + } +} + +func WithRank(rank *Rank) EdgeOption { + return func(e *Edge) { + e.Rank = rank + } +} + +func WithEdgeProps(props ...*Prop) EdgeOption { + return func(e *Edge) { + e.Props = append(e.Props, props...) 
+ } +} + +func WithEdgeIgnoreExistedIndex(ignore bool) EdgeOption { + return func(e *Edge) { + e.IgnoreExistedIndex = &ignore + } +} + +func (e *Edge) Options(opts ...EdgeOption) *Edge { + for _, opt := range opts { + opt(e) + } + return e +} + +func (e *Edge) Complete() { + if e.Src != nil { + e.Src.Complete() + e.Src.Name = strSrc + if e.Src.ID != nil { + e.Src.ID.Name = strVID + } + } + if e.Dst != nil { + e.Dst.Complete() + e.Dst.Name = strDst + if e.Dst.ID != nil { + e.Dst.ID.Name = strVID + } + } + + e.fnInsertStatement = e.insertStatementWithoutRank + if e.Rank != nil { + e.Rank.Complete() + e.fnInsertStatement = e.insertStatementWithRank + } + + e.Props.Complete() + + // default enable IGNORE_EXISTED_INDEX + insertPrefixFmt := "INSERT EDGE IGNORE_EXISTED_INDEX %s(%s) VALUES " + if e.IgnoreExistedIndex != nil && !*e.IgnoreExistedIndex { + insertPrefixFmt = "INSERT EDGE %s(%s) VALUES " + } + + e.insertPrefix = fmt.Sprintf( + insertPrefixFmt, + utils.ConvertIdentifier(e.Name), + strings.Join(e.Props.NameList(), ", "), + ) +} + +func (e *Edge) Validate() error { + if e.Name == "" { + return e.importError(errors.ErrNoEdgeName) + } + + if e.Src == nil { + return e.importError(errors.ErrNoEdgeSrc) + } + + if err := e.Src.Validate(); err != nil { + return e.importError(err) + } + + if e.Dst == nil { + return e.importError(errors.ErrNoEdgeDst) + } + + if err := e.Dst.Validate(); err != nil { + return e.importError(err) + } + + if e.Rank != nil { + if err := e.Rank.Validate(); err != nil { + return err + } + } + + if err := e.Props.Validate(); err != nil { + return e.importError(err) + } + + return nil +} + +func (e *Edge) InsertStatement(records ...Record) (string, error) { + return e.fnInsertStatement(records...) +} + +func (e *Edge) insertStatementWithoutRank(records ...Record) (string, error) { + buff := bytebufferpool.Get() + defer bytebufferpool.Put(buff) + + buff.SetString(e.insertPrefix) + + for i, record := range records { + srcIDValue, err := e.Src.IDValue(record) + if err != nil { + return "", e.importError(err) + } + dstIDValue, err := e.Dst.IDValue(record) + if err != nil { + return "", e.importError(err) + } + propsValueList, err := e.Props.ValueList(record) + if err != nil { + return "", e.importError(err) + } + + if i > 0 { + _, _ = buff.WriteString(", ") + } + + // "%s->%s:(%s)" + _, _ = buff.WriteString(srcIDValue) + _, _ = buff.WriteString("->") + _, _ = buff.WriteString(dstIDValue) + _, _ = buff.WriteString(":(") + _, _ = buff.WriteStringSlice(propsValueList, ", ") + _, _ = buff.WriteString(")") + } + return buff.String(), nil +} + +func (e *Edge) insertStatementWithRank(records ...Record) (string, error) { + buff := bytebufferpool.Get() + defer bytebufferpool.Put(buff) + + buff.SetString(e.insertPrefix) + + for i, record := range records { + srcIDValue, err := e.Src.IDValue(record) + if err != nil { + return "", e.importError(err) + } + dstIDValue, err := e.Dst.IDValue(record) + if err != nil { + return "", e.importError(err) + } + rankValue, err := e.Rank.Value(record) + if err != nil { + return "", e.importError(err) + } + propsValueList, err := e.Props.ValueList(record) + if err != nil { + return "", e.importError(err) + } + + if i > 0 { + _, _ = buff.WriteString(", ") + } + + // "%s->%s@%s:(%s)" + _, _ = buff.WriteString(srcIDValue) + _, _ = buff.WriteString("->") + _, _ = buff.WriteString(dstIDValue) + _, _ = buff.WriteString("@") + _, _ = buff.WriteString(rankValue) + _, _ = buff.WriteString(":(") + _, _ = buff.WriteStringSlice(propsValueList, ", ") + _, _ = 
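+ // the pooled in-memory buffer's writes are assumed infallible here,
+ // so their results are deliberately discarded.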
buff.WriteString(")") + } + return buff.String(), nil +} + +func (e *Edge) importError(err error, formatWithArgs ...any) *errors.ImportError { //nolint:unparam + return errors.AsOrNewImportError(err, formatWithArgs...).SetEdgeName(e.Name) +} + +func (n *EdgeNodeRef) Complete() { + if n.ID != nil { + n.ID.Complete() + } +} + +func (n *EdgeNodeRef) Validate() error { + if n.Name == "" { + return n.importError(errors.ErrNoNodeName) + } + if n.ID == nil { + return n.importError(errors.ErrNoNodeID) + } + //revive:disable-next-line:if-return + if err := n.ID.Validate(); err != nil { + return err + } + return nil +} + +func (n *EdgeNodeRef) IDValue(record Record) (string, error) { + return n.ID.Value(record) +} + +func (n *EdgeNodeRef) importError(err error, formatWithArgs ...any) *errors.ImportError { + return errors.AsOrNewImportError(err, formatWithArgs...).SetNodeName(n.Name) +} + +func (es Edges) Complete() { + for i := range es { + es[i].Complete() + } +} + +func (es Edges) Validate() error { + for i := range es { + if err := es[i].Validate(); err != nil { + return err + } + } + return nil +} diff --git a/pkg/spec/v3/edge_test.go b/pkg/spec/v3/edge_test.go new file mode 100644 index 00000000..411ef040 --- /dev/null +++ b/pkg/spec/v3/edge_test.go @@ -0,0 +1,701 @@ +package specv3 + +import ( + stderrors "errors" + "fmt" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Edge", func() { + Describe(".Complete", func() { + It("should complete", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithEdgeProps(&Prop{Name: "prop1", Type: ValueTypeString}), + WithEdgeProps(&Prop{Name: "prop2", Type: ValueTypeInt}), + ) + edge.Complete() + + Expect(edge.Name).To(Equal("name")) + + Expect(edge.Src.Name).To(Equal(strSrc)) + Expect(edge.Src.ID.Name).To(Equal(strVID)) + Expect(edge.Src.ID.Type).To(Equal(ValueTypeInt)) + + Expect(edge.Dst.Name).To(Equal(strDst)) + Expect(edge.Dst.ID.Name).To(Equal(strVID)) + Expect(edge.Dst.ID.Type).To(Equal(ValueTypeString)) + + Expect(edge.Props).To(HaveLen(2)) + Expect(edge.Props[0].Name).To(Equal("prop1")) + Expect(edge.Props[0].Type).To(Equal(ValueTypeString)) + Expect(edge.Props[1].Name).To(Equal("prop2")) + Expect(edge.Props[1].Type).To(Equal(ValueTypeInt)) + }) + }) + + Describe(".Validate", func() { + It("no name", func() { + edge := NewEdge("") + err := edge.Validate() + Expect(stderrors.Is(err, errors.ErrNoEdgeName)).To(BeTrue()) + }) + + It("no src", func() { + edge := NewEdge("name") + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoEdgeSrc)).To(BeTrue()) + }) + + It("src validate failed", func() { + edge := NewEdge("name", WithEdgeSrc(&EdgeNodeRef{ + Name: "node", + })) + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoNodeID)).To(BeTrue()) + }) + + It("no dst", func() { + edge := NewEdge("name", WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + })) + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoEdgeDst)).To(BeTrue()) + }) + + It("dst validate failed", func() { + edge := NewEdge("name", WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + 
Name: "id", + Type: ValueTypeInt, + }, + }), WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + })) + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoNodeID)).To(BeTrue()) + }) + + It("dst validate failed 2", func() { + edge := NewEdge("name", WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{}, + })) + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoNodeIDName)).To(BeTrue()) + }) + + It("props validate failed", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithEdgeProps(&Prop{Name: "prop"}), + ) + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedValueType)).To(BeTrue()) + }) + + It("success without props", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + ) + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("success with props", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithEdgeProps(&Prop{Name: "prop", Type: ValueTypeString}), + ) + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("WithRank failed", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithRank(&Rank{Index: -1}), + ) + err := edge.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrInvalidIndex)).To(BeTrue()) + }) + + It("WithRank successfully", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithRank(&Rank{Index: 0}), + ) + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + }) + + Describe(".InsertStatement", func() { + When("no props", func() { + var edge *Edge + BeforeEach(func() { + edge = NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 1, + }, + }), + ) + edge.Complete() + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`() VALUES 1->\"id1\":()")) + }) + + It("two record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1.1", 
"str1"}, []string{"2", "id2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`() VALUES 1->\"id1\":(), 2->\"id2\":()")) + }) + + It("src failed", func() { + statement, err := edge.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("dst failed", func() { + statement, err := edge.InsertStatement([]string{"1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + + When("one prop", func() { + var edge *Edge + BeforeEach(func() { + edge = NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 1, + }, + }), + WithEdgeProps( + &Prop{Name: "prop1", Type: ValueTypeString, Index: 3}, + ), + ) + edge.Complete() + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`(`prop1`) VALUES 1->\"id1\":(\"str1\")")) + }) + + It("two record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1.1", "str1"}, []string{"2", "id2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`(`prop1`) VALUES 1->\"id1\":(\"str1\"), 2->\"id2\":(\"str2\")")) + }) + + It("src failed", func() { + statement, err := edge.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("dst failed", func() { + statement, err := edge.InsertStatement([]string{"1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("props failed", func() { + statement, err := edge.InsertStatement([]string{"1", "id1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + + When("many props", func() { + var edge *Edge + BeforeEach(func() { + edge = NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 1, + }, + }), + WithEdgeProps( + &Prop{Name: "prop1", Type: ValueTypeString, Index: 3}, + &Prop{Name: "prop2", Type: ValueTypeDouble, Index: 2}, + ), + ) + edge.Complete() + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`(`prop1`, `prop2`) VALUES 1->\"id1\":(\"str1\", 1.1)")) + }) + + It("two record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1.1", "str1"}, []string{"2", "id2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`(`prop1`, `prop2`) VALUES 1->\"id1\":(\"str1\", 
1.1), 2->\"id2\":(\"str2\", 2.2)")) + }) + + It("src failed", func() { + statement, err := edge.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("dst failed", func() { + statement, err := edge.InsertStatement([]string{"1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("props failed", func() { + statement, err := edge.InsertStatement([]string{"1", "id1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + + When("WithRank", func() { + var edge *Edge + BeforeEach(func() { + edge = NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 1, + }, + }), + WithRank(&Rank{Index: 2}), + WithEdgeProps( + &Prop{Name: "prop1", Type: ValueTypeString, Index: 4}, + &Prop{Name: "prop2", Type: ValueTypeDouble, Index: 3}, + ), + ) + edge.Complete() + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`(`prop1`, `prop2`) VALUES 1->\"id1\"@1:(\"str1\", 1.1)")) + }) + + It("two record", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1", "1.1", "str1"}, []string{"2", "id2", "2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`(`prop1`, `prop2`) VALUES 1->\"id1\"@1:(\"str1\", 1.1), 2->\"id2\"@2:(\"str2\", 2.2)")) + }) + + It("src failed", func() { + statement, err := edge.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("dst failed", func() { + statement, err := edge.InsertStatement([]string{"1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("rank failed", func() { + statement, err := edge.InsertStatement([]string{"1", "id1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("props failed", func() { + statement, err := edge.InsertStatement([]string{"1", "id1", "1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + + When("WithEdgeIgnoreExistedIndex", func() { + It("WithEdgeIgnoreExistedIndex false", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 1, + }, + }), + WithEdgeIgnoreExistedIndex(false), + ) + edge.Complete() + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + + statement, err := edge.InsertStatement([]string{"1", "id1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE `name`() VALUES 1->\"id1\":()")) + }) + 
It("WithEdgeIgnoreExistedIndex true", func() { + edge := NewEdge( + "name", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 1, + }, + }), + WithEdgeIgnoreExistedIndex(true), + ) + edge.Complete() + err := edge.Validate() + Expect(err).NotTo(HaveOccurred()) + + statement, err := edge.InsertStatement([]string{"1", "id1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `name`() VALUES 1->\"id1\":()")) + }) + }) + }) +}) + +var _ = Describe("Edges", func() { + Describe(".Complete", func() { + It("should complete", func() { + edges := Edges{ + NewEdge( + "name1", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithEdgeProps(&Prop{}), + ), + NewEdge( + "name2", + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + WithEdgeProps(&Prop{}), + ), + } + edges.Complete() + Expect(edges).To(HaveLen(2)) + Expect(edges[0].Name).To(Equal("name1")) + Expect(edges[1].Name).To(Equal("name2")) + }) + }) + + Describe(".Validate", func() { + var edges Edges + BeforeEach(func() { + for i := 1; i <= 4; i++ { + edges = append(edges, NewEdge( + fmt.Sprintf("name%d", i), + WithEdgeSrc(&EdgeNodeRef{ + Name: "srcNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "dstNodeName", + ID: &NodeID{ + Name: "id", + Type: ValueTypeString, + }, + }), + )) + } + }) + DescribeTable("table cases", + func(getEdges func() Edges, failedIndex int) { + es := getEdges() + err := es.Validate() + if failedIndex >= 0 { + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(es[failedIndex].Validate())) + } else { + Expect(err).NotTo(HaveOccurred()) + } + }, + Entry("empty nodes", + func() Edges { return Edges{} }, + -1, + ), + Entry("success", + func() Edges { return edges }, + -1, + ), + Entry("failed at 0", + func() Edges { + return Edges{ + NewEdge(""), + edges[0], + edges[1], + edges[2], + edges[3], + } + }, + 0, + ), + Entry("failed at 1", + func() Edges { + return Edges{ + edges[0], + NewEdge("failed"), + edges[1], + edges[2], + edges[3], + } + }, + 1, + ), + Entry("failed at end", + func() Edges { + return Edges{ + edges[0], + edges[1], + edges[2], + edges[3], + NewEdge("failed", WithEdgeSrc(&EdgeNodeRef{})), + } + }, + 4, + ), + Entry("failed at id validate", + func() Edges { + return Edges{ + edges[0], + edges[1], + edges[2], + edges[3], + NewEdge("failed", WithEdgeSrc(&EdgeNodeRef{ID: &NodeID{ + Type: "unsupported", + }})), + } + }, + 4, + ), + ) + }) +}) diff --git a/pkg/spec/v3/graph.go b/pkg/spec/v3/graph.go new file mode 100644 index 00000000..6c96c024 --- /dev/null +++ b/pkg/spec/v3/graph.go @@ -0,0 +1,122 @@ +package specv3 + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + specbase "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/base" +) + +type ( + Graph struct { + Name string `yaml:"name"` + Nodes Nodes `yaml:"tags,omitempty"` + Edges Edges `yaml:"edges,omitempty"` + } + + GraphOption func(*Graph) +) + +func NewGraph(name string, opts ...GraphOption) 
*Graph { + g := &Graph{ + Name: name, + } + + for _, opt := range opts { + opt(g) + } + + return g +} + +func WithGraphNodes(nodes ...*Node) GraphOption { + return func(g *Graph) { + g.AddNodes(nodes...) + } +} + +func WithGraphEdges(edges ...*Edge) GraphOption { + return func(g *Graph) { + g.AddEdges(edges...) + } +} + +func (g *Graph) AddNodes(nodes ...*Node) { + g.Nodes = append(g.Nodes, nodes...) +} + +func (g *Graph) AddEdges(edges ...*Edge) { + g.Edges = append(g.Edges, edges...) +} + +func (g *Graph) Complete() { + if g.Nodes != nil { + g.Nodes.Complete() + } + if g.Edges != nil { + g.Edges.Complete() + } +} + +func (g *Graph) Validate() error { + if g.Name == "" { + return errors.ErrNoSpaceName + } + if err := g.Nodes.Validate(); err != nil { + return err + } + //revive:disable-next-line:if-return + if err := g.Edges.Validate(); err != nil { + return err + } + + return nil +} + +func (g *Graph) InsertNodeStatement(n *Node, records ...Record) (string, error) { + statement, err := n.InsertStatement(records...) + if err != nil { + return "", g.importError(err).SetGraphName(g.Name).SetNodeName(n.Name) + } + return statement, nil +} + +func (g *Graph) InsertNodeBuilder(n *Node) specbase.StatementBuilder { + return specbase.StatementBuilderFunc(func(records ...specbase.Record) (string, error) { + return g.InsertNodeStatement(n, records...) + }) +} + +func (g *Graph) InsertEdgeStatement(e *Edge, records ...Record) (string, error) { + statement, err := e.InsertStatement(records...) + if err != nil { + return "", g.importError(err).SetGraphName(g.Name).SetEdgeName(e.Name) + } + return statement, nil +} + +func (g *Graph) InsertEdgeBuilder(e *Edge) specbase.StatementBuilder { + return specbase.StatementBuilderFunc(func(records ...specbase.Record) (string, error) { + return g.InsertEdgeStatement(e, records...) + }) +} + +func (g *Graph) GetNodeByName(name string) (*Node, bool) { + for _, n := range g.Nodes { + if n.Name == name { + return n, true + } + } + return nil, false +} + +func (g *Graph) GetEdgeByName(name string) (*Edge, bool) { + for _, e := range g.Edges { + if e.Name == name { + return e, true + } + } + return nil, false +} + +func (g *Graph) importError(err error, formatWithArgs ...any) *errors.ImportError { + return errors.AsOrNewImportError(err, formatWithArgs...).SetGraphName(g.Name) +} diff --git a/pkg/spec/v3/graph_test.go b/pkg/spec/v3/graph_test.go new file mode 100644 index 00000000..7deaa339 --- /dev/null +++ b/pkg/spec/v3/graph_test.go @@ -0,0 +1,299 @@ +package specv3 + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Graph", func() { + Describe(".Complete", func() { + It("should complete", func() { + graph := NewGraph( + "graphName", + WithGraphNodes( + NewNode( + "node1", + WithNodeID(&NodeID{ + Name: "id1", + Type: ValueTypeString, + }), + ), + ), + WithGraphNodes( + NewNode( + "node2", + WithNodeID(&NodeID{ + Name: "id2", + Type: ValueTypeInt, + }), + ), + ), + WithGraphEdges( + NewEdge( + "edge1", + WithEdgeSrc(&EdgeNodeRef{ + Name: "node1", + ID: &NodeID{ + Name: "id1", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "node1", + ID: &NodeID{ + Name: "id1", + Type: ValueTypeInt, + }, + }), + ), + ), + WithGraphEdges( + NewEdge( + "edge2", + WithEdgeSrc(&EdgeNodeRef{ + Name: "node2", + ID: &NodeID{ + Name: "id2", + Type: ValueTypeString, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "node2", + ID: &NodeID{ + Name: "id2", + Type: ValueTypeString, + }, + }), + ), + ), + ) + graph.Complete() + Expect(graph.Name).To(Equal("graphName")) + + Expect(graph.Nodes).To(HaveLen(2)) + Expect(graph.Nodes[0].Name).To(Equal("node1")) + Expect(graph.Nodes[0].ID.Name).To(Equal(strVID)) + Expect(graph.Nodes[1].Name).To(Equal("node2")) + Expect(graph.Nodes[1].ID.Name).To(Equal(strVID)) + + Expect(graph.Edges).To(HaveLen(2)) + Expect(graph.Edges[0].Name).To(Equal("edge1")) + Expect(graph.Edges[0].Src.Name).To(Equal(strSrc)) + Expect(graph.Edges[0].Src.ID.Name).To(Equal(strVID)) + Expect(graph.Edges[0].Dst.Name).To(Equal(strDst)) + Expect(graph.Edges[0].Dst.ID.Name).To(Equal(strVID)) + Expect(graph.Edges[1].Name).To(Equal("edge2")) + Expect(graph.Edges[1].Src.Name).To(Equal(strSrc)) + Expect(graph.Edges[1].Src.ID.Name).To(Equal(strVID)) + Expect(graph.Edges[1].Dst.Name).To(Equal(strDst)) + Expect(graph.Edges[1].Dst.ID.Name).To(Equal(strVID)) + }) + }) + + Describe(".Validate", func() { + It("no name", func() { + graph := NewGraph("") + err := graph.Validate() + Expect(stderrors.Is(err, errors.ErrNoSpaceName)).To(BeTrue()) + }) + + It("nodes validate failed", func() { + graph := NewGraph("graphName", WithGraphNodes(NewNode(""))) + err := graph.Validate() + Expect(stderrors.Is(err, errors.ErrNoNodeName)).To(BeTrue()) + }) + + It("nodes validate failed", func() { + graph := NewGraph("graphName", WithGraphEdges(NewEdge(""))) + err := graph.Validate() + Expect(stderrors.Is(err, errors.ErrNoEdgeName)).To(BeTrue()) + }) + + It("success", func() { + graph := NewGraph( + "graphName", + WithGraphNodes( + NewNode( + "node1", + WithNodeID(&NodeID{ + Name: "id", + Type: ValueTypeInt, + }), + ), + ), + WithGraphEdges( + NewEdge( + "edge1", + WithEdgeSrc(&EdgeNodeRef{ + Name: "node1", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "node1", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + }, + }), + ), + ), + ) + err := graph.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + }) + + Describe(".NodeStatement", func() { + var graph *Graph + BeforeEach(func() { + graph = NewGraph( + "graphName", + WithGraphNodes( + NewNode( + "node1", + WithNodeID(&NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }), + ), + ), + ) + graph.Complete() + err := graph.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("success", func() { + node := graph.Nodes[0] + statement, err := graph.InsertNodeStatement(node, []string{"1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `node1`() VALUES 1:()")) + + b := graph.InsertNodeBuilder(node) + statement, err = 
b.Build([]string{"1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `node1`() VALUES 1:()")) + }) + + It("failed", func() { + node := graph.Nodes[0] + statement, err := graph.InsertNodeStatement(node, []string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(Equal("")) + + b := graph.InsertNodeBuilder(node) + statement, err = b.Build([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(Equal("")) + }) + }) + + Describe(".EdgeStatement", func() { + var graph *Graph + BeforeEach(func() { + graph = NewGraph( + "graphName", + WithGraphEdges( + NewEdge( + "edge1", + WithEdgeSrc(&EdgeNodeRef{ + Name: "node1", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + }), + WithEdgeDst(&EdgeNodeRef{ + Name: "node1", + ID: &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 1, + }, + }), + ), + ), + ) + graph.Complete() + err := graph.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("success", func() { + edge := graph.Edges[0] + statement, err := graph.InsertEdgeStatement(edge, []string{"1", "2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `edge1`() VALUES 1->2:()")) + + b := graph.InsertEdgeBuilder(edge) + statement, err = b.Build([]string{"1", "2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT EDGE IGNORE_EXISTED_INDEX `edge1`() VALUES 1->2:()")) + }) + + It("failed", func() { + edge := graph.Edges[0] + statement, err := graph.InsertEdgeStatement(edge, []string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(Equal("")) + + b := graph.InsertEdgeBuilder(edge) + statement, err = b.Build([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(Equal("")) + }) + }) + + Describe("", func() { + var graph *Graph + BeforeEach(func() { + graph = NewGraph("graphName", WithGraphNodes(NewNode("node1"))) + }) + + It("exists", func() { + node, ok := graph.GetNodeByName("node1") + Expect(ok).To(BeTrue()) + Expect(node).NotTo(BeNil()) + Expect(node.Name).To(Equal("node1")) + }) + + It("not exists", func() { + node, ok := graph.GetNodeByName("not-exists") + Expect(ok).To(BeFalse()) + Expect(node).To(BeNil()) + }) + }) + + Describe("", func() { + var graph *Graph + BeforeEach(func() { + graph = NewGraph("graphName", WithGraphEdges(NewEdge("edge1"))) + }) + + It("exists", func() { + edge, ok := graph.GetEdgeByName("edge1") + Expect(ok).To(BeTrue()) + Expect(edge).NotTo(BeNil()) + Expect(edge.Name).To(Equal("edge1")) + }) + + It("not exists", func() { + edge, ok := graph.GetEdgeByName("not-exists") + Expect(ok).To(BeFalse()) + Expect(edge).To(BeNil()) + }) + }) +}) diff --git a/pkg/spec/v3/node.go b/pkg/spec/v3/node.go new file mode 100644 index 00000000..eac24587 --- /dev/null +++ b/pkg/spec/v3/node.go @@ -0,0 +1,148 @@ +package specv3 + +import ( + "fmt" + "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/bytebufferpool" + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" +) + +type ( + // Node is VERTEX in 3.x + Node struct { + Name string `yaml:"name"` + ID *NodeID `yaml:"id"` + Props Props `yaml:"props,omitempty"` + + IgnoreExistedIndex *bool `yaml:"ignoreExistedIndex,omitempty"` + + insertPrefix 
string // // "INSERT EDGE name(prop_name, ..., prop_name) VALUES " + } + + Nodes []*Node + + NodeOption func(*Node) +) + +func NewNode(name string, opts ...NodeOption) *Node { + n := &Node{ + Name: name, + } + n.Options(opts...) + + return n +} + +func WithNodeID(id *NodeID) NodeOption { + return func(n *Node) { + n.ID = id + } +} + +func WithNodeProps(props ...*Prop) NodeOption { + return func(n *Node) { + n.Props = append(n.Props, props...) + } +} + +func WithNodeIgnoreExistedIndex(ignore bool) NodeOption { + return func(n *Node) { + n.IgnoreExistedIndex = &ignore + } +} + +func (n *Node) Options(opts ...NodeOption) *Node { + for _, opt := range opts { + opt(n) + } + return n +} + +func (n *Node) Complete() { + if n.ID != nil { + n.ID.Complete() + n.ID.Name = strVID + } + n.Props.Complete() + + // default enable IGNORE_EXISTED_INDEX + insertPrefixFmt := "INSERT VERTEX IGNORE_EXISTED_INDEX %s(%s) VALUES " + if n.IgnoreExistedIndex != nil && !*n.IgnoreExistedIndex { + insertPrefixFmt = "INSERT VERTEX %s(%s) VALUES " + } + n.insertPrefix = fmt.Sprintf( + insertPrefixFmt, + utils.ConvertIdentifier(n.Name), + strings.Join(n.Props.NameList(), ", "), + ) +} + +func (n *Node) Validate() error { + if n.Name == "" { + return n.importError(errors.ErrNoNodeName) + } + + if n.ID == nil { + return n.importError(errors.ErrNoNodeID) + } + + if err := n.ID.Validate(); err != nil { + return n.importError(err) + } + + if err := n.Props.Validate(); err != nil { + return n.importError(err) + } + + return nil +} + +func (n *Node) InsertStatement(records ...Record) (string, error) { + buff := bytebufferpool.Get() + defer bytebufferpool.Put(buff) + + buff.SetString(n.insertPrefix) + + for i, record := range records { + idValue, err := n.ID.Value(record) + if err != nil { + return "", n.importError(err) + } + propsValueList, err := n.Props.ValueList(record) + if err != nil { + return "", n.importError(err) + } + + if i > 0 { + _, _ = buff.WriteString(", ") + } + + // "%s:(%s)" + _, _ = buff.WriteString(idValue) + _, _ = buff.WriteString(":(") + _, _ = buff.WriteStringSlice(propsValueList, ", ") + _, _ = buff.WriteString(")") + } + return buff.String(), nil +} + +func (n *Node) importError(err error, formatWithArgs ...any) *errors.ImportError { //nolint:unparam + return errors.AsOrNewImportError(err, formatWithArgs...).SetNodeName(n.Name) +} + +func (ns Nodes) Complete() { + for i := range ns { + ns[i].Complete() + } +} + +func (ns Nodes) Validate() error { + for i := range ns { + if err := ns[i].Validate(); err != nil { + return err + } + } + return nil +} diff --git a/pkg/spec/v3/node_test.go b/pkg/spec/v3/node_test.go new file mode 100644 index 00000000..7e27d4d3 --- /dev/null +++ b/pkg/spec/v3/node_test.go @@ -0,0 +1,313 @@ +package specv3 + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Node", func() { + Describe(".Complete", func() { + It("should complete", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{ + Name: "id", + Type: ValueTypeInt, + }), + WithNodeProps(&Prop{Name: "prop1", Type: ValueTypeString}), + WithNodeProps(&Prop{Name: "prop2", Type: ValueTypeInt}), + ) + node.Complete() + Expect(node.Validate()).NotTo(HaveOccurred()) + + Expect(node.Name).To(Equal("name")) + + Expect(node.ID.Name).To(Equal("vid")) + Expect(node.ID.Type).To(Equal(ValueTypeInt)) + + Expect(node.Props).To(HaveLen(2)) + Expect(node.Props[0].Name).To(Equal("prop1")) + Expect(node.Props[0].Type).To(Equal(ValueTypeString)) + Expect(node.Props[1].Name).To(Equal("prop2")) + Expect(node.Props[1].Type).To(Equal(ValueTypeInt)) + }) + }) + + Describe(".Validate", func() { + It("no name", func() { + node := NewNode("") + err := node.Validate() + Expect(stderrors.Is(err, errors.ErrNoNodeName)).To(BeTrue()) + }) + + It("no id", func() { + node := NewNode("name") + err := node.Validate() + Expect(stderrors.Is(err, errors.ErrNoNodeID)).To(BeTrue()) + }) + + It("id validate failed", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: "unsupported"}), + ) + err := node.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedValueType)).To(BeTrue()) + }) + + It("props validate failed", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt}), + WithNodeProps(&Prop{Name: "prop", Type: "unsupported"}), + ) + err := node.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedValueType)).To(BeTrue()) + }) + + It("success without props", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt}), + ) + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("success with props", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt}), + WithNodeProps(&Prop{Name: "prop", Type: ValueTypeString}), + ) + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + }) + + Describe(".InsertStatement", func() { + When("no props", func() { + var node *Node + BeforeEach(func() { + node = NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt, Index: 0}), + ) + node.Complete() + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := node.InsertStatement([]string{"1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`() VALUES 1:()")) + }) + + It("two record", func() { + statement, err := node.InsertStatement([]string{"1", "1.1", "str1"}, []string{"2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`() VALUES 1:(), 2:()")) + }) + + It("failed id no record", func() { + statement, err := node.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + + When("one prop", func() { + var node *Node + BeforeEach(func() { + node = NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt, Index: 0}), + WithNodeProps( + &Prop{Name: "prop1", Type: ValueTypeString, Index: 2}, + ), + ) + node.Complete() + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := 
node.InsertStatement([]string{"1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`(`prop1`) VALUES 1:(\"str1\")")) + }) + + It("two record", func() { + statement, err := node.InsertStatement([]string{"1", "1.1", "str1"}, []string{"2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`(`prop1`) VALUES 1:(\"str1\"), 2:(\"str2\")")) + }) + + It("failed id no record", func() { + statement, err := node.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("failed prop no record", func() { + statement, err := node.InsertStatement([]string{"1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + + When("many props", func() { + var node *Node + BeforeEach(func() { + node = NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt, Index: 0}), + WithNodeProps( + &Prop{Name: "prop1", Type: ValueTypeString, Index: 2}, + &Prop{Name: "prop2", Type: ValueTypeDouble, Index: 1}, + ), + ) + node.Complete() + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + + It("one record", func() { + statement, err := node.InsertStatement([]string{"1", "1.1", "str1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`(`prop1`, `prop2`) VALUES 1:(\"str1\", 1.1)")) + }) + + It("two record", func() { + statement, err := node.InsertStatement([]string{"1", "1.1", "str1"}, []string{"2", "2.2", "str2"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`(`prop1`, `prop2`) VALUES 1:(\"str1\", 1.1), 2:(\"str2\", 2.2)")) + }) + + It("failed id no record", func() { + statement, err := node.InsertStatement([]string{}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + + It("failed prop no record", func() { + statement, err := node.InsertStatement([]string{"1"}) + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrNoRecord)).To(BeTrue()) + Expect(statement).To(BeEmpty()) + }) + }) + }) + + When("WithNodeIgnoreExistedIndex", func() { + It("WithNodeIgnoreExistedIndex false", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt, Index: 0}), + WithNodeIgnoreExistedIndex(false), + ) + node.Complete() + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + + statement, err := node.InsertStatement([]string{"1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX `name`() VALUES 1:()")) + }) + It("WithNodeIgnoreExistedIndex true", func() { + node := NewNode( + "name", + WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt, Index: 0}), + WithNodeIgnoreExistedIndex(true), + ) + node.Complete() + err := node.Validate() + Expect(err).NotTo(HaveOccurred()) + + statement, err := node.InsertStatement([]string{"1"}) + Expect(err).NotTo(HaveOccurred()) + Expect(statement).To(Equal("INSERT VERTEX IGNORE_EXISTED_INDEX `name`() VALUES 1:()")) + }) + }) +}) + +var _ = Describe("Nodes", func() { + Describe(".Complete", func() { + It("default value type", func() { + nodes := Nodes{ + NewNode("name1", WithNodeID(&NodeID{}), WithNodeProps(&Prop{})), + NewNode("name2", WithNodeID(&NodeID{}), 
WithNodeProps(&Prop{})), + } + nodes.Complete() + Expect(nodes).To(HaveLen(2)) + Expect(nodes[0].Name).To(Equal("name1")) + Expect(nodes[1].Name).To(Equal("name2")) + }) + }) + + DescribeTable(".Validate", + func(nodes Nodes, failedIndex int) { + err := nodes.Validate() + if failedIndex >= 0 { + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(nodes[failedIndex].Validate())) + } else { + Expect(err).NotTo(HaveOccurred()) + } + }, + Entry("empty nodes", + Nodes{}, + -1, + ), + Entry("success", + Nodes{ + NewNode("name1", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name2", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name3", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name4", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + }, + -1, + ), + Entry("failed at 0", + Nodes{ + NewNode(""), + NewNode("name1", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name2", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name3", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name4", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + }, + 0, + ), + Entry("failed at 1", + Nodes{ + NewNode("name1", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("failed"), + NewNode("name2", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name3", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name4", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + }, + 1, + ), + Entry("failed at end", + Nodes{ + NewNode("name1", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name2", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name3", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("name4", WithNodeID(&NodeID{Name: "id", Type: ValueTypeInt})), + NewNode("failed", WithNodeID(&NodeID{})), + }, + 4, + ), + ) +}) diff --git a/pkg/spec/v3/nodeid.go b/pkg/spec/v3/nodeid.go new file mode 100644 index 00000000..c84fb870 --- /dev/null +++ b/pkg/spec/v3/nodeid.go @@ -0,0 +1,86 @@ +package specv3 + +import ( + "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/picker" +) + +var supportedNodeIDFunctions = map[string]struct{}{ + "HASH": {}, +} + +type ( + // NodeID is the VID in 3.x + NodeID struct { + Name string `yaml:"-"` + Type ValueType `yaml:"type"` + Index int `yaml:"index"` + ConcatItems []interface{} `yaml:"concatItems,omitempty"` // only support string and int, string for constant, int is for Index + Function *string `yaml:"function"` + + picker picker.Picker + } +) + +func IsSupportedNodeIDFunction(function string) bool { + _, ok := supportedNodeIDFunctions[strings.ToUpper(function)] + return ok +} + +func (id *NodeID) Complete() { + if id.Type == "" { + id.Type = ValueTypeDefault + } +} + +func (id *NodeID) Validate() error { + if id.Name == "" { + return id.importError(errors.ErrNoNodeIDName) + } + if !IsSupportedNodeIDValueType(id.Type) { + return id.importError(errors.ErrUnsupportedValueType, "unsupported type %s", id.Type) + } + if id.Function != nil && !IsSupportedNodeIDFunction(*id.Function) { + return id.importError(errors.ErrUnsupportedFunction, "unsupported function %s", *id.Function) + } + if err := id.initPicker(); err != nil { + return id.importError(err, "init picker failed") + } + + return nil +} + +func (id *NodeID) Value(record Record) (string, error) { + val, err := id.picker.Pick(record) + if err != nil { + if 
len(id.ConcatItems) > 0 { + return "", id.importError(err, "record concat items %v pick failed", id.ConcatItems).SetRecord(record) + } + return "", id.importError(err, "record index %d pick failed", id.Index).SetRecord(record) + } + defer val.Release() + return val.Val, nil +} + +func (id *NodeID) initPicker() error { + pickerConfig := picker.Config{ + Type: string(id.Type), + Function: id.Function, + } + + if len(id.ConcatItems) > 0 { + pickerConfig.ConcatItems = id.ConcatItems + } else { + pickerConfig.Indices = []int{id.Index} + } + + var err error + id.picker, err = pickerConfig.Build() + return err +} + +func (id *NodeID) importError(err error, formatWithArgs ...any) *errors.ImportError { + return errors.AsOrNewImportError(err, formatWithArgs...).SetPropName(id.Name) +} diff --git a/pkg/spec/v3/nodeid_test.go b/pkg/spec/v3/nodeid_test.go new file mode 100644 index 00000000..9cea7e61 --- /dev/null +++ b/pkg/spec/v3/nodeid_test.go @@ -0,0 +1,169 @@ +package specv3 + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + "github.com/agiledragon/gomonkey/v2" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("NodeID", func() { + Describe(".Complete", func() { + It("empty prop", func() { + nodeID := &NodeID{} + nodeID.Complete() + Expect(nodeID.Type).To(Equal(ValueTypeDefault)) + }) + }) + + Describe(".Validate", func() { + It("failed", func() { + nodeID := &NodeID{Name: "id", Type: "unsupported"} + err := nodeID.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedValueType)).To(BeTrue()) + }) + + It("success", func() { + nodeID := &NodeID{Name: "id", Type: ValueTypeDefault} + err := nodeID.Validate() + Expect(err).NotTo(HaveOccurred()) + }) + }) + + DescribeTable(".Value", + func(nodeID *NodeID, record Record, expectValue string, expectErr error) { + if nodeID.Function != nil { + patches := gomonkey.NewPatches() + defer patches.Reset() + patches.ApplyGlobalVar(&supportedNodeIDFunctions, map[string]struct{}{ + "HASH": {}, + }) + } + + val, err := func() (string, error) { + nodeID.Complete() + err := nodeID.Validate() + if err != nil { + return "", err + } + return nodeID.Value(record) + }() + + if expectErr != nil { + if Expect(err).To(HaveOccurred()) { + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + e, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(e.Cause()).To(Equal(expectErr)) + Expect(e.PropName()).To(Equal(nodeID.Name)) + } + Expect(val).To(Equal(expectValue)) + } else { + Expect(err).NotTo(HaveOccurred()) + Expect(val).To(Equal(expectValue)) + } + }, + Entry("no record empty", + &NodeID{Name: "id"}, + Record([]string{}), + "", + errors.ErrNoRecord, + ), + Entry("no record", + &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 1, + }, + Record([]string{"0"}), + "", + errors.ErrNoRecord, + ), + Entry("record int", + &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + }, + Record([]string{"1"}), + "1", + nil, + ), + Entry("record string", + &NodeID{ + Name: "id", + Type: ValueTypeString, + Index: 0, + }, + Record([]string{"id"}), + "\"id\"", + nil, + ), + Entry("ConcatItems", + &NodeID{ + Name: "id", + Type: ValueTypeString, + ConcatItems: []interface{}{"c1", 3, "c2", 1, 2, "c3", 0}, + }, + Record([]string{"s0", "s1", "s2", "s3"}), + "\"c1s3c2s1s2c3s0\"", + nil, + ), + Entry("ConcatItems failed type", + &NodeID{ + Name: "id", + Type: ValueTypeString, + ConcatItems: []interface{}{true}, + }, + Record([]string{"1"}), + "", + 
errors.ErrUnsupportedConcatItemType, + ), + Entry("ConcatItems failed no record", + &NodeID{ + Name: "id", + Type: ValueTypeString, + ConcatItems: []interface{}{"c1", 3, "c2", 1, 2, "c3", 0, 10}, + }, + Record([]string{"s0", "s1", "s2", "s3"}), + "", + errors.ErrNoRecord, + ), + Entry("Function", + &NodeID{ + Name: "id", + Type: ValueTypeInt, + ConcatItems: []interface{}{"c1", 3, "c2", 1, 2, "c3", 0}, + Function: func() *string { s := "hash"; return &s }(), + }, + Record([]string{"s0", "s1", "s2", "s3"}), + "hash(\"c1s3c2s1s2c3s0\")", + nil, + ), + Entry("unsupported value type", + &NodeID{ + Name: "id", + Type: ValueTypeDouble, + Index: 0, + }, + Record([]string{"1.1"}), + nil, + errors.ErrUnsupportedValueType, + ), + Entry("unsupported function", + &NodeID{ + Name: "id", + Type: ValueTypeInt, + Index: 0, + Function: func() *string { s := "unsupported"; return &s }(), + }, + Record([]string{"1"}), + nil, + errors.ErrUnsupportedFunction, + ), + ) +}) diff --git a/pkg/spec/v3/prop.go b/pkg/spec/v3/prop.go new file mode 100644 index 00000000..d70eef42 --- /dev/null +++ b/pkg/spec/v3/prop.go @@ -0,0 +1,121 @@ +package specv3 + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/picker" + "github.com/vesoft-inc/nebula-importer/v4/pkg/utils" +) + +type ( + Prop struct { + Name string `yaml:"name"` + Type ValueType `yaml:"type"` + Index int `yaml:"index"` + Nullable bool `yaml:"nullable"` + NullValue string `yaml:"nullValue"` + AlternativeIndices []int `yaml:"alternativeIndices,omitempty"` + DefaultValue *string `yaml:"defaultValue"` + + convertedName string + picker picker.Picker + } + + Props []*Prop +) + +func (p *Prop) Complete() { + p.convertedName = utils.ConvertIdentifier(p.Name) + if p.Type == "" { + p.Type = ValueTypeDefault + } +} + +func (p *Prop) Validate() error { + if p.Name == "" { + return p.importError(errors.ErrNoPropName) + } + if !IsSupportedPropValueType(p.Type) { + return p.importError(errors.ErrUnsupportedValueType, "unsupported type %s", p.Type) + } + if err := p.initPicker(); err != nil { + return p.importError(err, "init picker failed") + } + return nil +} + +func (p *Prop) Value(record Record) (string, error) { + val, err := p.picker.Pick(record) + if err != nil { + return "", p.importError(err, "record index %d pick failed", p.Index).SetRecord(record) + } + defer val.Release() + return val.Val, nil +} + +func (p *Prop) initPicker() error { + pickerConfig := picker.Config{ + Indices: []int{p.Index}, + Type: string(p.Type), + } + + if p.Nullable { + pickerConfig.Nullable = func(s string) bool { + return s == p.NullValue + } + pickerConfig.NullValue = dbNULL + if len(p.AlternativeIndices) > 0 { + pickerConfig.Indices = append(pickerConfig.Indices, p.AlternativeIndices...) 
+ } + pickerConfig.DefaultValue = p.DefaultValue + } + + var err error + p.picker, err = pickerConfig.Build() + return err +} + +func (p *Prop) importError(err error, formatWithArgs ...any) *errors.ImportError { + return errors.AsOrNewImportError(err, formatWithArgs...).SetPropName(p.Name) +} + +func (ps Props) Complete() { + for i := range ps { + ps[i].Complete() + } +} + +func (ps Props) Validate() error { + for i := range ps { + if err := ps[i].Validate(); err != nil { + return err + } + } + return nil +} + +func (ps Props) ValueList(record Record) ([]string, error) { + valueList := make([]string, 0, len(ps)) + for _, prop := range ps { + value, err := prop.Value(record) + if err != nil { + return nil, err + } + valueList = append(valueList, value) + } + return valueList, nil +} + +func (ps Props) NameList() []string { + nameList := make([]string, len(ps)) + for i := range ps { + nameList[i] = ps[i].convertedName + } + return nameList +} + +func (ps Props) Append(props ...*Prop) Props { + cpy := make(Props, len(ps)+len(props)) + copy(cpy, ps) + copy(cpy[len(ps):], props) + return cpy +} diff --git a/pkg/spec/v3/prop_test.go b/pkg/spec/v3/prop_test.go new file mode 100644 index 00000000..18bf43da --- /dev/null +++ b/pkg/spec/v3/prop_test.go @@ -0,0 +1,447 @@ +package specv3 + +import ( + stderrors "errors" + "strings" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + "github.com/agiledragon/gomonkey/v2" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Prop", func() { + Describe(".Complete", func() { + It("no value type", func() { + prop := &Prop{} + prop.Complete() + Expect(prop.Type).To(Equal(ValueTypeDefault)) + + prop = &Prop{ + Type: ValueTypeInt, + } + prop.Complete() + Expect(prop.Type).To(Equal(ValueTypeInt)) + }) + It("have value type", func() { + prop := &Prop{ + Type: ValueTypeInt, + } + prop.Complete() + Expect(prop.Type).To(Equal(ValueTypeInt)) + }) + }) + + DescribeTable(".Validate", + func(prop *Prop, expectErr error) { + err := prop.Validate() + if expectErr != nil { + if Expect(err).To(HaveOccurred()) { + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + e, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(e.Cause()).To(Equal(expectErr)) + Expect(e.PropName()).To(Equal(prop.Name)) + } + } else { + Expect(err).NotTo(HaveOccurred()) + } + }, + Entry("no prop name", &Prop{}, errors.ErrNoPropName), + Entry("unsupported value type", &Prop{Name: "a", Type: "x"}, errors.ErrUnsupportedValueType), + Entry("supported value type", &Prop{Name: "a", Type: ValueTypeDefault}, nil), + ) + + It(".Validate init picker failed", func() { + prop := &Prop{Name: "a", Type: "unsupported"} + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyGlobalVar(&supportedPropValueTypes, map[ValueType]struct{}{ + ValueType(strings.ToUpper(string(prop.Type))): {}, + }) + + err := prop.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrUnsupportedValueType)).To(BeTrue()) + }) + + DescribeTable(".Value", + func(p *Prop, record Record, expectValue string, expectErr error) { + val, err := func() (string, error) { + p.Complete() + err := p.Validate() + if err != nil { + return "", err + } + return p.Value(record) + }() + if expectErr != nil { + if Expect(err).To(HaveOccurred()) { + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + e, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(e.Cause()).To(Equal(expectErr)) + Expect(e.PropName()).To(Equal(p.Name)) + } + 
Expect(val).To(Equal(expectValue)) + } else { + Expect(err).NotTo(HaveOccurred()) + Expect(val).To(Equal(expectValue)) + } + }, + Entry("no record empty", + &Prop{Name: "p1"}, + Record([]string{}), + "", + errors.ErrNoRecord, + ), + Entry("no record", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 1, + }, + Record([]string{"0"}), + "", + errors.ErrNoRecord, + ), + Entry("record int", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + }, + Record([]string{"1"}), + "1", + nil, + ), + Entry("record string", + &Prop{ + Name: "p1", + Type: ValueTypeString, + Index: 0, + }, + Record([]string{"str"}), + "\"str\"", + nil, + ), + Entry("record double", + &Prop{ + Name: "p1", + Type: ValueTypeDouble, + Index: 0, + }, + Record([]string{"1.1"}), + "1.1", + nil, + ), + Entry("Nullable", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + }, + Record([]string{""}), + "NULL", + nil, + ), + Entry("Nullable not null", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + }, + Record([]string{"1"}), + "1", + nil, + ), + Entry("NullValue", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + }, + Record([]string{"N/A"}), + "NULL", + nil, + ), + Entry("NullValue not null", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + }, + Record([]string{"1"}), + "1", + nil, + ), + Entry("AlternativeIndices 0", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + AlternativeIndices: []int{}, + }, + Record([]string{"1"}), + "1", + nil, + ), + Entry("AlternativeIndices 1 pick index", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + AlternativeIndices: []int{1}, + }, + Record([]string{"1"}), + "1", + nil, + ), + Entry("AlternativeIndices 1 pick failed", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + AlternativeIndices: []int{1}, + }, + Record([]string{"N/A"}), + "", + errors.ErrNoRecord, + ), + Entry("AlternativeIndices 1 pick 1", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + AlternativeIndices: []int{1}, + }, + Record([]string{"N/A", "1"}), + "1", + nil, + ), + Entry("AlternativeIndices 1 pick null", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + AlternativeIndices: []int{1}, + }, + Record([]string{"N/A", "N/A"}), + "NULL", + nil, + ), + Entry("AlternativeIndices 1 pick default", + &Prop{ + Name: "p1", + Type: ValueTypeInt, + Index: 0, + Nullable: true, + NullValue: "N/A", + AlternativeIndices: []int{1}, + DefaultValue: func() *string { s := "0"; return &s }(), + }, + Record([]string{"N/A", "N/A"}), + "0", + nil, + ), + Entry("unsupported value type", + &Prop{ + Name: "p1", + Type: "unsupported", + }, + Record([]string{"1"}), + nil, + errors.ErrUnsupportedValueType, + ), + ) +}) + +var _ = Describe("Props", func() { + Describe(".Complete", func() { + It("default value type", func() { + prop1 := Prop{} + prop2 := Prop{ + Type: ValueTypeInt, + } + prop3 := Prop{ + Type: ValueTypeDouble, + } + p1, p2, p3 := prop1, prop2, prop3 + props := Props{&p1, &p2, &p3} + props.Complete() + Expect(props).To(HaveLen(3)) + + p1.Complete() + Expect(props[0]).To(Equal(&p1)) + p2.Complete() + Expect(props[1]).To(Equal(&p2)) + p3.Complete() + Expect(props[2]).To(Equal(&p3)) + }) + }) + + DescribeTable(".Validate", + func(props Props, failedIndex int) { + 
err := props.Validate() + if failedIndex >= 0 { + Expect(err).To(HaveOccurred()) + Expect(err).To(Equal(props[failedIndex].Validate())) + } else { + Expect(err).NotTo(HaveOccurred()) + } + }, + Entry("empty props", + Props{}, + -1, + ), + Entry("success", + Props{ + &Prop{Name: "a", Type: ValueTypeInt}, + &Prop{Name: "b", Type: ValueTypeString}, + &Prop{Name: "c", Type: ValueTypeDouble}, + }, + -1, + ), + Entry("failed at 0", + Props{ + &Prop{Name: ""}, + &Prop{Name: "a", Type: ValueTypeInt}, + &Prop{Name: "b", Type: ValueTypeString}, + &Prop{Name: "c", Type: ValueTypeDouble}, + }, + 0, + ), + Entry("failed at 1", + Props{ + &Prop{Name: "a", Type: ValueTypeInt}, + &Prop{Name: "failed"}, + &Prop{Name: "b", Type: ValueTypeString}, + &Prop{Name: "c", Type: ValueTypeDouble}, + }, + 1, + ), + Entry("failed at end", + Props{ + &Prop{Name: "a", Type: ValueTypeInt}, + &Prop{Name: "b", Type: ValueTypeString}, + &Prop{Name: "c", Type: ValueTypeDouble}, + &Prop{Name: "failed"}, + }, + 3, + ), + ) + + DescribeTable(".ValueList", + func(props Props, record Record, expectValueList []string, failedIndex int) { + valueList, err := func() ([]string, error) { + props.Complete() + if err := props.Validate(); err != nil { + return nil, err + } + return props.ValueList(record) + }() + if failedIndex >= 0 { + if Expect(err).To(HaveOccurred()) { + _, expectErr := props[failedIndex].Value(record) + Expect(err).To(Equal(expectErr)) + } + Expect(valueList).To(Equal(expectValueList)) + } else { + Expect(err).NotTo(HaveOccurred()) + Expect(valueList).To(Equal(expectValueList)) + } + }, + Entry("empty props", + Props{}, + []string{"1", "1.1", "str"}, + []string{}, + -1, + ), + Entry("success", + Props{ + &Prop{Name: "a", Type: ValueTypeInt, Index: 0}, + &Prop{Name: "b", Type: ValueTypeString, Index: 2}, + &Prop{Name: "c", Type: ValueTypeDouble, Index: 1}, + }, + []string{"1", "1.1", "str"}, + []string{"1", "\"str\"", "1.1"}, + -1, + ), + Entry("failed", + Props{ + &Prop{Name: "a", Type: ValueTypeInt, Index: 0}, + }, + nil, + nil, + 0, + ), + ) + + DescribeTable(".NameList", + func(props Props, expectValueList []string) { + props.Complete() + valueList := props.NameList() + Expect(valueList).To(Equal(expectValueList)) + }, + Entry("empty props", + Props{}, + []string{}, + ), + Entry("one", + Props{ + &Prop{Name: "a", Type: ValueTypeInt, Index: 0}, + }, + []string{"`a`"}, + ), + Entry("many", + Props{ + &Prop{Name: "a", Type: ValueTypeInt, Index: 0}, + &Prop{Name: "b", Type: ValueTypeString, Index: 2}, + &Prop{Name: "c", Type: ValueTypeDouble, Index: 1}, + }, + []string{"`a`", "`b`", "`c`"}, + ), + ) + + DescribeTable(".Append", + func(l, r Props) { + lLen, rLen := len(l), len(r) + props := l.Append(r...) 
+ Expect(props).To(HaveLen(lLen + rLen)) + }, + Entry("nil + nil", + nil, + nil, + ), + Entry("nil + non-nil", + nil, + Props{&Prop{}}, + ), + Entry("non-nil + nil", + Props{&Prop{}, &Prop{}}, + nil, + ), + Entry("non-nil + non-nil", + Props{&Prop{}, &Prop{}, &Prop{}}, + Props{&Prop{}, &Prop{}, &Prop{}, &Prop{}}, + ), + ) +}) diff --git a/pkg/spec/v3/rank.go b/pkg/spec/v3/rank.go new file mode 100644 index 00000000..61a97ab0 --- /dev/null +++ b/pkg/spec/v3/rank.go @@ -0,0 +1,48 @@ +package specv3 + +import ( + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + "github.com/vesoft-inc/nebula-importer/v4/pkg/picker" +) + +type ( + Rank struct { + Index int `yaml:"index"` + + picker picker.Picker + } +) + +func (*Rank) Complete() {} + +func (r *Rank) Validate() error { + //revive:disable-next-line:if-return + if err := r.initPicker(); err != nil { + return r.importError(err, "init picker failed") + } + return nil +} + +func (r *Rank) Value(record Record) (string, error) { + val, err := r.picker.Pick(record) + if err != nil { + return "", r.importError(err, "record index %d pick failed", r.Index).SetRecord(record) + } + defer val.Release() + return val.Val, nil +} + +func (r *Rank) initPicker() error { + pickerConfig := picker.Config{ + Indices: []int{r.Index}, + Type: string(ValueTypeInt), + } + + var err error + r.picker, err = pickerConfig.Build() + return err +} + +func (*Rank) importError(err error, formatWithArgs ...any) *errors.ImportError { + return errors.AsOrNewImportError(err, formatWithArgs...) +} diff --git a/pkg/spec/v3/rank_test.go b/pkg/spec/v3/rank_test.go new file mode 100644 index 00000000..6baec420 --- /dev/null +++ b/pkg/spec/v3/rank_test.go @@ -0,0 +1,86 @@ +package specv3 + +import ( + stderrors "errors" + + "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Rank", func() { + It(".Complete", func() { + prop := &Rank{} + prop.Complete() + }) + + DescribeTable(".Validate", + func(rank *Rank, expectErr error) { + err := rank.Validate() + if expectErr != nil { + if Expect(err).To(HaveOccurred()) { + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + e, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(e.Cause()).To(Equal(expectErr)) + } + } else { + Expect(err).NotTo(HaveOccurred()) + } + }, + Entry("", &Rank{Index: -1}, errors.ErrInvalidIndex), + Entry("normal", &Rank{Index: 0}, nil), + ) + + It(".Validate init picker failed", func() { + rank := Rank{Index: -1} + + err := rank.Validate() + Expect(err).To(HaveOccurred()) + Expect(stderrors.Is(err, errors.ErrInvalidIndex)).To(BeTrue()) + }) + + DescribeTable(".Value", + func(rank *Rank, record Record, expectValue string, expectErr error) { + val, err := func() (string, error) { + rank.Complete() + err := rank.Validate() + if err != nil { + return "", err + } + return rank.Value(record) + }() + if expectErr != nil { + if Expect(err).To(HaveOccurred()) { + Expect(stderrors.Is(err, expectErr)).To(BeTrue()) + e, ok := errors.AsImportError(err) + Expect(ok).To(BeTrue()) + Expect(e.Cause()).To(Equal(expectErr)) + } + Expect(val).To(Equal(expectValue)) + } else { + Expect(err).NotTo(HaveOccurred()) + Expect(val).To(Equal(expectValue)) + } + }, + Entry("no record empty", + &Rank{Index: 0}, + Record([]string{}), + "", + errors.ErrNoRecord, + ), + Entry("no record", + &Rank{Index: 1}, + Record([]string{"0"}), + "", + errors.ErrNoRecord, + ), + Entry("successfully", + &Rank{Index: 1}, + Record([]string{"1", "11"}), + "11", + nil, + ), + ) +}) diff --git a/pkg/spec/v3/record.go b/pkg/spec/v3/record.go new file mode 100644 index 00000000..e5d6b500 --- /dev/null +++ b/pkg/spec/v3/record.go @@ -0,0 +1,8 @@ +package specv3 + +import specbase "github.com/vesoft-inc/nebula-importer/v4/pkg/spec/base" + +type ( + Record = specbase.Record + Records = specbase.Records +) diff --git a/pkg/spec/v3/spec.go b/pkg/spec/v3/spec.go new file mode 100644 index 00000000..9599891c --- /dev/null +++ b/pkg/spec/v3/spec.go @@ -0,0 +1,7 @@ +package specv3 + +const ( + strVID = "vid" + strSrc = "src" + strDst = "dst" +) diff --git a/pkg/spec/v3/spec_suite_test.go b/pkg/spec/v3/spec_suite_test.go new file mode 100644 index 00000000..ad37fc53 --- /dev/null +++ b/pkg/spec/v3/spec_suite_test.go @@ -0,0 +1,13 @@ +package specv3 + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +func TestSpec(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg spec v3 Suite") +} diff --git a/pkg/spec/v3/value.go b/pkg/spec/v3/value.go new file mode 100644 index 00000000..f0b340e9 --- /dev/null +++ b/pkg/spec/v3/value.go @@ -0,0 +1,69 @@ +package specv3 + +import ( + "strings" +) + +const ( + dbNULL = "NULL" + + ValueTypeBool ValueType = "BOOL" + ValueTypeInt ValueType = "INT" + ValueTypeString ValueType = "STRING" + ValueTypeFloat ValueType = "FLOAT" + ValueTypeDouble ValueType = "DOUBLE" + ValueTypeDate ValueType = "DATE" + ValueTypeDateTime ValueType = "DATETIME" + ValueTypeTimestamp ValueType = "TIMESTAMP" + ValueTypeGeo ValueType = "GEOGRAPHY" + ValueTypeGeoPoint ValueType = "GEOGRAPHY(POINT)" + ValueTypeGeoLineString ValueType = "GEOGRAPHY(LINESTRING)" + ValueTypeGeoPolygon ValueType = "GEOGRAPHY(POLYGON)" + + ValueTypeDefault = ValueTypeString +) + +var ( + supportedPropValueTypes = map[ValueType]struct{}{ + ValueTypeBool: {}, + ValueTypeInt: {}, + ValueTypeString: {}, + ValueTypeFloat: {}, + ValueTypeDouble: {}, + ValueTypeDate: {}, + ValueTypeDateTime: {}, + ValueTypeTimestamp: {}, + ValueTypeGeo: {}, + ValueTypeGeoPoint: {}, + ValueTypeGeoLineString: {}, + ValueTypeGeoPolygon: {}, + } + + supportedNodeIDValueTypes = map[ValueType]struct{}{ + ValueTypeInt: {}, + ValueTypeString: {}, + } +) + +type ValueType string + +func IsSupportedPropValueType(t ValueType) bool { + _, ok := supportedPropValueTypes[ValueType(strings.ToUpper(t.String()))] + return ok +} + +func IsSupportedNodeIDValueType(t ValueType) bool { + _, ok := supportedNodeIDValueTypes[ValueType(strings.ToUpper(t.String()))] + return ok +} + +func (t ValueType) Equal(vt ValueType) bool { + if !IsSupportedPropValueType(t) || !IsSupportedPropValueType(vt) { + return false + } + return strings.EqualFold(t.String(), vt.String()) +} + +func (t ValueType) String() string { + return string(t) +} diff --git a/pkg/spec/v3/value_test.go b/pkg/spec/v3/value_test.go new file mode 100644 index 00000000..f9d11968 --- /dev/null +++ b/pkg/spec/v3/value_test.go @@ -0,0 +1,70 @@ +package specv3 + +import ( + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Value", func() { + DescribeTable("IsSupportedPropValueType", + func(t ValueType, expectIsSupported bool) { + Expect(IsSupportedPropValueType(t)).To(Equal(expectIsSupported)) + }, + EntryDescription("%[1]s -> %[2]t"), + Entry(nil, ValueTypeBool, true), + Entry(nil, ValueTypeInt, true), + Entry(nil, ValueTypeString, true), + Entry(nil, ValueTypeFloat, true), + Entry(nil, ValueTypeDouble, true), + Entry(nil, ValueTypeDate, true), + Entry(nil, ValueTypeDateTime, true), + Entry(nil, ValueTypeTimestamp, true), + Entry(nil, ValueTypeGeo, true), + Entry(nil, ValueTypeGeoPoint, true), + Entry(nil, ValueTypeGeoLineString, true), + Entry(nil, ValueTypeGeoPolygon, true), + Entry(nil, ValueType("int"), true), + Entry(nil, ValueType("inT"), true), + Entry(nil, ValueType("iNt"), true), + Entry(nil, ValueType("InT"), true), + Entry(nil, ValueType("unsupported"), false), + ) + + DescribeTable("IsSupportedNodeIDValueType", + func(t ValueType, expectIsSupported bool) { + Expect(IsSupportedNodeIDValueType(t)).To(Equal(expectIsSupported)) + }, + EntryDescription("%[1]s -> %[2]t"), + Entry(nil, ValueTypeBool, false), + Entry(nil, ValueTypeInt, true), + Entry(nil, ValueTypeString, true), + Entry(nil, ValueTypeFloat, false), + Entry(nil, ValueTypeDouble, false), + Entry(nil, ValueTypeDate, false), + Entry(nil, ValueTypeDateTime, false), + Entry(nil, ValueTypeTimestamp, false), + Entry(nil, ValueTypeGeo, false), + Entry(nil, ValueTypeGeoPoint, false), + Entry(nil, ValueTypeGeoLineString, false), + Entry(nil, ValueTypeGeoPolygon, false), + Entry(nil, ValueType("int"), true), + Entry(nil, ValueType("inT"), true), + Entry(nil, ValueType("iNt"), true), + Entry(nil, ValueType("InT"), true), + Entry(nil, ValueType("unsupported"), false), + ) + + DescribeTable(".Equal", + func(t, vt ValueType, expectIsSupported bool) { + Expect(t.Equal(vt)).To(Equal(expectIsSupported)) + }, + EntryDescription("%[1]s == %[2]s ? 
%[3]t"), + Entry(nil, ValueTypeInt, ValueType("int"), true), + Entry(nil, ValueTypeInt, ValueType("inT"), true), + Entry(nil, ValueTypeInt, ValueType("iNt"), true), + Entry(nil, ValueTypeInt, ValueType("InT"), true), + Entry(nil, ValueTypeInt, ValueType("unsupported"), false), + Entry(nil, ValueType("unsupported"), ValueTypeInt, false), + Entry(nil, ValueType("unsupported"), ValueType("unsupported"), false), + ) +}) diff --git a/pkg/stats/concurrency_stats.go b/pkg/stats/concurrency_stats.go new file mode 100644 index 00000000..87073ae8 --- /dev/null +++ b/pkg/stats/concurrency_stats.go @@ -0,0 +1,74 @@ +package stats + +import ( + "sync" + "time" +) + +type ( + ConcurrencyStats struct { + s Stats + mu sync.Mutex + initOne sync.Once + } +) + +func NewConcurrencyStats() *ConcurrencyStats { + return &ConcurrencyStats{} +} + +func (s *ConcurrencyStats) Init() { + s.initOne.Do(func() { + s.s.StartTime = time.Now() + }) +} + +func (s *ConcurrencyStats) AddTotalBytes(nBytes int64) { + s.mu.Lock() + s.s.TotalBytes += nBytes + s.mu.Unlock() +} + +func (s *ConcurrencyStats) Failed(nBytes, nRecords int64) { + s.mu.Lock() + s.s.ProcessedBytes += nBytes + s.s.FailedRecords += nRecords + s.s.TotalRecords += nRecords + s.mu.Unlock() +} + +func (s *ConcurrencyStats) Succeeded(nBytes, nRecords int64) { + s.mu.Lock() + s.s.ProcessedBytes += nBytes + s.s.TotalRecords += nRecords + s.mu.Unlock() +} + +func (s *ConcurrencyStats) RequestFailed(nRecords int64) { + s.mu.Lock() + s.s.FailedRequest++ + s.s.TotalRequest++ + s.s.FailedProcessed += nRecords + s.s.TotalProcessed += nRecords + s.mu.Unlock() +} + +func (s *ConcurrencyStats) RequestSucceeded(nRecords int64, latency, respTime time.Duration) { + s.mu.Lock() + s.s.TotalRequest++ + s.s.TotalLatency += latency + s.s.TotalRespTime += respTime + s.s.TotalProcessed += nRecords + s.mu.Unlock() +} + +func (s *ConcurrencyStats) Stats() *Stats { + s.mu.Lock() + cpy := s.s + s.mu.Unlock() + return &cpy +} + +func (s *ConcurrencyStats) String() string { + return s.Stats().String() +} diff --git a/pkg/stats/concurrency_stats_test.go b/pkg/stats/concurrency_stats_test.go new file mode 100644 index 00000000..b8a20333 --- /dev/null +++ b/pkg/stats/concurrency_stats_test.go @@ -0,0 +1,106 @@ +package stats + +import ( + "math/rand" + "sync" + "time" + + . "github.com/onsi/ginkgo/v2" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ConcurrencyStats", func() { + It("concurrency", func() { + rand.Seed(time.Now().UnixNano()) + + concurrencyStats := NewConcurrencyStats() + concurrencyStats.Init() + initStats := concurrencyStats.Stats() + Expect(initStats.StartTime.IsZero()).To(BeFalse()) + Expect(initStats.TotalRecords).To(BeZero()) + Expect(initStats.Percentage()).To(Equal(0.0)) + + concurrency := 100 + totalBytes := make([]int64, concurrency) + failedBytes := make([]int64, concurrency) + succeededBytes := make([]int64, concurrency) + var ( + sumBytes int64 + sumFailedRecords int64 + sumRecords int64 + sumFailedBatches int64 + sumBatches int64 + ) + + var wg sync.WaitGroup + + for i := 0; i < concurrency; i++ { + totalBytes[i] = rand.Int63n(int64(1024*1024*1024)) + 1024 + sumBytes += totalBytes[i] + + wg.Add(1) + go func(index int) { + concurrencyStats.AddTotalBytes(totalBytes[index]) + wg.Done() + }(i) + + if rand.Intn(2) > 0 { + failedBytes[i] = rand.Int63n(totalBytes[i]) + 1 + + sumFailedRecords += 7 + sumRecords += 7 + sumFailedBatches += 1 + sumBatches += 1 + + wg.Add(1) + go func(nBytes int64) { + concurrencyStats.RequestFailed(7) + concurrencyStats.RequestFailed(7) + concurrencyStats.Failed(nBytes, 7) + wg.Done() + }(failedBytes[i]) + } + + succeededBytes[i] = totalBytes[i] - failedBytes[i] + if succeededBytes[i] > 0 { + sumRecords += 7 + sumBatches += 1 + + wg.Add(1) + go func(nBytes int64) { + concurrencyStats.RequestSucceeded(7, 9*time.Millisecond, 11*time.Millisecond) + concurrencyStats.RequestSucceeded(7, 9*time.Millisecond, 11*time.Millisecond) + concurrencyStats.Succeeded(nBytes, 7) + wg.Done() + }(succeededBytes[i]) + } + + wg.Add(1) + go func() { + _ = concurrencyStats.Stats() + _ = concurrencyStats.String() + wg.Done() + }() + } + + wg.Wait() + concurrencyStats.Init() + s := concurrencyStats.Stats() + Expect(s).To(Equal(&Stats{ + StartTime: initStats.StartTime, + ProcessedBytes: sumBytes, + TotalBytes: sumBytes, + FailedRecords: sumFailedRecords, + TotalRecords: sumRecords, + FailedRequest: sumFailedBatches * 2, + TotalRequest: sumBatches * 2, + TotalLatency: 9 * time.Millisecond * time.Duration(sumBatches-sumFailedBatches) * 2, + TotalRespTime: 11 * time.Millisecond * time.Duration(sumBatches-sumFailedBatches) * 2, + FailedProcessed: sumFailedRecords * 2, + TotalProcessed: sumRecords * 2, + })) + + Expect(s.Percentage()).To(Equal(100.0)) + Expect(s.String()).To(ContainSubstring("100.00%(")) + }) +}) diff --git a/pkg/stats/stats.go b/pkg/stats/stats.go new file mode 100644 index 00000000..9c4a1913 --- /dev/null +++ b/pkg/stats/stats.go @@ -0,0 +1,74 @@ +package stats + +import ( + "fmt" + "time" + + "github.com/dustin/go-humanize" +) + +type ( + Stats struct { + StartTime time.Time // The time to start statistics. + ProcessedBytes int64 // The processed bytes. + TotalBytes int64 // The total bytes. + FailedRecords int64 // The number of records that have failed to be processed. + TotalRecords int64 // The number of records that have been processed. + FailedRequest int64 // The number of requests that have failed. + TotalRequest int64 // The number of requests that have been processed. + TotalLatency time.Duration // The cumulative latency. + TotalRespTime time.Duration // The cumulative response time. + FailedProcessed int64 // The number of nodes and edges that have failed to be processed. + TotalProcessed int64 // The number of nodes and edges that have been processed. 
+	}
+)
+
+func (s *Stats) Percentage() float64 {
+	if s.TotalBytes == 0 {
+		return 0
+	}
+	return float64(s.ProcessedBytes) / float64(s.TotalBytes) * 100
+}
+
+func (s *Stats) String() string {
+	var (
+		duration           = time.Since(s.StartTime)
+		percentage         = s.Percentage()
+		remainingTime      = "..."
+		seconds            = duration.Seconds()
+		recordsPerSecond   float64
+		avgLatency         time.Duration
+		avgRespTime        time.Duration
+		requestPerSecond   float64
+		processedPerSecond float64
+	)
+
+	if percentage > 0 {
+		remainingTime = time.Duration((100 - percentage) / percentage * float64(duration)).Truncate(time.Second).String()
+	}
+
+	if s.TotalRecords > 0 {
+		recordsPerSecond = float64(s.TotalRecords) / seconds
+	}
+
+	if s.TotalRequest > 0 {
+		avgLatency = s.TotalLatency / time.Duration(s.TotalRequest)
+		avgRespTime = s.TotalRespTime / time.Duration(s.TotalRequest)
+		requestPerSecond = float64(s.TotalRequest) / seconds
+	}
+	if s.TotalProcessed > 0 {
+		processedPerSecond = float64(s.TotalProcessed) / seconds
+	}
+
+	return fmt.Sprintf("%s %s "+
+		"%.2f%%(%s/%s) "+
+		"Records{Finished: %d, Failed: %d, Rate: %.2f/s}, "+
+		"Requests{Finished: %d, Failed: %d, Latency: %s/%s, Rate: %.2f/s}, "+
+		"Processed{Finished: %d, Failed: %d, Rate: %.2f/s}",
+		duration.Truncate(time.Second), remainingTime,
+		percentage, humanize.IBytes(uint64(s.ProcessedBytes)), humanize.IBytes(uint64(s.TotalBytes)),
+		s.TotalRecords, s.FailedRecords, recordsPerSecond,
+		s.TotalRequest, s.FailedRequest, avgLatency, avgRespTime, requestPerSecond,
+		s.TotalProcessed, s.FailedProcessed, processedPerSecond,
+	)
+}
diff --git a/pkg/stats/stats_suite_test.go b/pkg/stats/stats_suite_test.go
new file mode 100644
index 00000000..4bc02b8f
--- /dev/null
+++ b/pkg/stats/stats_suite_test.go
@@ -0,0 +1,13 @@
+package stats
+
+import (
+	"testing"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+func TestStats(t *testing.T) {
+	RegisterFailHandler(Fail)
+	RunSpecs(t, "Pkg stats Suite")
+}
diff --git a/pkg/stats/stats_test.go b/pkg/stats/stats_test.go
new file mode 100644
index 00000000..947d27f6
--- /dev/null
+++ b/pkg/stats/stats_test.go
@@ -0,0 +1,35 @@
+package stats
+
+import (
+	"time"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+var _ = Describe("Stats", func() {
+	Describe(".String", func() {
+		It("TotalRecords is zero", func() {
+			s := &Stats{
+				StartTime: time.Now(),
+			}
+			Expect(s.String()).Should(Equal("0s ... 
0.00%(0 B/0 B) Records{Finished: 0, Failed: 0, Rate: 0.00/s}, Requests{Finished: 0, Failed: 0, Latency: 0s/0s, Rate: 0.00/s}, Processed{Finished: 0, Failed: 0, Rate: 0.00/s}")) + }) + It("TotalRecords is not zero", func() { + s := &Stats{ + StartTime: time.Now().Add(-time.Second * 10), + ProcessedBytes: 100 * 1024, + TotalBytes: 300 * 1024, + FailedRecords: 23, + TotalRecords: 1234, + FailedRequest: 1, + TotalRequest: 12, + TotalLatency: time.Second * 12, + TotalRespTime: 2 * time.Second * 12, + FailedProcessed: 2, + TotalProcessed: 5, + } + Expect(s.String()).Should(Equal("10s 20s 33.33%(100 KiB/300 KiB) Records{Finished: 1234, Failed: 23, Rate: 123.40/s}, Requests{Finished: 12, Failed: 1, Latency: 1s/2s, Rate: 1.20/s}, Processed{Finished: 5, Failed: 2, Rate: 0.50/s}")) + }) + }) +}) diff --git a/pkg/stats/statsmgr.go b/pkg/stats/statsmgr.go deleted file mode 100644 index eaa00045..00000000 --- a/pkg/stats/statsmgr.go +++ /dev/null @@ -1,145 +0,0 @@ -package stats - -import ( - "fmt" - "time" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/base" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" - "github.com/vesoft-inc/nebula-importer/v3/pkg/reader" -) - -type StatsMgr struct { - OutputStatsCh chan Stats - StatsCh chan base.Stats - DoneCh chan bool - Stats Stats - Done bool - CountFileDone bool - runnerLogger *logger.RunnerLogger -} - -type Stats struct { - NumFailed int64 `json:"numFailed"` - NumReadFailed int64 `json:"numReadFailed"` - TotalCount int64 `json:"totalCount"` - TotalBatches int64 `json:"totalBatches"` - TotalLatency int64 `json:"totalLatency"` - TotalReqTime int64 `json:"totalReqTime"` - TotalBytes int64 `json:"totalBytes"` - TotalImportedBytes int64 `json:"totalImportedBytes"` -} - -func NewStatsMgr(files []*config.File, runnerLogger *logger.RunnerLogger) *StatsMgr { - numReadingFiles := len(files) - stats := Stats{ - NumFailed: 0, - TotalBytes: 0, - TotalCount: 0, - TotalLatency: 0, - TotalBatches: 0, - TotalReqTime: 0.0, - } - m := StatsMgr{ - OutputStatsCh: make(chan Stats), - StatsCh: make(chan base.Stats), - DoneCh: make(chan bool), - Stats: stats, - runnerLogger: runnerLogger, - } - go m.startWorker(numReadingFiles) - return &m -} - -func (s *StatsMgr) Close() { - s.runnerLogger.Infof("Stats manager closing") - close(s.StatsCh) - close(s.DoneCh) - close(s.OutputStatsCh) - s.Done = true - s.runnerLogger.Infof("Stats manager closed") -} - -func (s *StatsMgr) updateStat(stat base.Stats) { - s.Stats.TotalBatches++ - s.Stats.TotalCount += int64(stat.BatchSize) - s.Stats.TotalReqTime += stat.ReqTime - s.Stats.TotalLatency += stat.Latency - s.Stats.TotalImportedBytes += stat.ImportedBytes -} - -func (s *StatsMgr) updateFailed(stat base.Stats) { - s.Stats.TotalBatches++ - s.Stats.TotalCount += int64(stat.BatchSize) - s.Stats.NumFailed += int64(stat.BatchSize) - s.Stats.TotalImportedBytes += stat.ImportedBytes -} - -func (s *StatsMgr) outputStats() { - s.OutputStatsCh <- s.Stats -} - -func (s *StatsMgr) print(prefix string, now time.Time) { - if s.Stats.TotalCount == 0 { - return - } - secs := time.Since(now).Seconds() - avgLatency := s.Stats.TotalLatency / s.Stats.TotalBatches - avgReq := s.Stats.TotalReqTime / s.Stats.TotalBatches - rps := float64(s.Stats.TotalCount) / secs - logger.Log.Infof("%s: Time(%.2fs), Finished(%d), Failed(%d), Read Failed(%d), Latency AVG(%dus), Batches Req AVG(%dus), Rows AVG(%.2f/s)", - prefix, secs, s.Stats.TotalCount, s.Stats.NumFailed, s.Stats.NumReadFailed, avgLatency, avgReq, 
rps)
-}
-
-func (s *StatsMgr) CountFileBytes(freaders []*reader.FileReader) error {
-	if s.CountFileDone {
-		return nil
-	}
-	s.Stats.TotalBytes = 0
-	for _, r := range freaders {
-		if r == nil {
-			continue
-		}
-		bytes, err := r.DataReader.TotalBytes()
-		if err != nil {
-			return err
-		}
-		s.Stats.TotalBytes += bytes
-	}
-	s.CountFileDone = true
-	return nil
-}
-
-func (s *StatsMgr) startWorker(numReadingFiles int) {
-	ticker := time.NewTicker(5 * time.Second)
-	defer ticker.Stop()
-	now := time.Now()
-	for {
-		select {
-		case <-ticker.C:
-			s.print("Tick", now)
-		case stat, ok := <-s.StatsCh:
-			if !ok {
-				return
-			}
-			switch stat.Type {
-			case base.SUCCESS:
-				s.updateStat(stat)
-			case base.FAILURE:
-				s.updateFailed(stat)
-			case base.FILEDONE:
-				s.print(fmt.Sprintf("Done(%s)", stat.Filename), now)
-				numReadingFiles--
-				s.runnerLogger.Infof("Remaining read files %d", numReadingFiles)
-				if numReadingFiles == 0 {
-					s.DoneCh <- true
-				}
-			case base.OUTPUT:
-				s.outputStats()
-			default:
-				logger.Log.Errorf("Error stats type: %s", stat.Type)
-			}
-		}
-	}
-}
diff --git a/pkg/utils/path.go b/pkg/utils/path.go
new file mode 100644
index 00000000..f3a5e8c8
--- /dev/null
+++ b/pkg/utils/path.go
@@ -0,0 +1,11 @@
+package utils
+
+import "path/filepath"
+
+// RelativePathBaseOn resolves filePath relative to basePath; absolute paths are returned unchanged.
+func RelativePathBaseOn(basePath, filePath string) string {
+	if filepath.IsAbs(filePath) {
+		return filePath
+	}
+	return filepath.Join(basePath, filePath)
+}
diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go
new file mode 100644
index 00000000..8241e587
--- /dev/null
+++ b/pkg/utils/path_test.go
@@ -0,0 +1,54 @@
+package utils
+
+import (
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+var _ = Describe("path", func() {
+	DescribeTable("RelativePathBaseOn",
+		func(basePath, filePath, expectFilePath string) {
+			Expect(RelativePathBaseOn(basePath, filePath)).To(Equal(expectFilePath))
+		},
+		EntryDescription("RelativePathBaseOn(%[1]q, %[2]q) == %[3]q"),
+		Entry(nil, ".", "f", "f"),
+		Entry(nil, "./d1", "f", "d1/f"),
+		Entry(nil, "./d1/", "f", "d1/f"),
+		Entry(nil, "./d1/d2", "f", "d1/d2/f"),
+
+		Entry(nil, "/", "f", "/f"),
+		Entry(nil, "/d1", "f", "/d1/f"),
+		Entry(nil, "/d1/", "f", "/d1/f"),
+		Entry(nil, "/d1/d2", "f", "/d1/d2/f"),
+
+		Entry(nil, "/", "d3/f", "/d3/f"),
+		Entry(nil, "/d1", "d3/f", "/d1/d3/f"),
+		Entry(nil, "/d1/", "d3/f", "/d1/d3/f"),
+		Entry(nil, "/d1/d2", "d3/f", "/d1/d2/d3/f"),
+
+		Entry(nil, "/", "./d3/f", "/d3/f"),
+		Entry(nil, "/d1", "./d3/f", "/d1/d3/f"),
+		Entry(nil, "/d1/", "./d3/f", "/d1/d3/f"),
+		Entry(nil, "/d1/d2", "./d3/f", "/d1/d2/d3/f"),
+
+		Entry(nil, "/", "../f", "/f"),
+		Entry(nil, "/d1", "../f", "/f"),
+		Entry(nil, "/d1/", "../f", "/f"),
+		Entry(nil, "/d1/d2", "../f", "/d1/f"),
+
+		Entry(nil, "/", "../d3/f", "/d3/f"),
+		Entry(nil, "/d1", "../d3/f", "/d3/f"),
+		Entry(nil, "/d1/", "../d3/f", "/d3/f"),
+		Entry(nil, "/d1/d2", "../d3/f", "/d1/d3/f"),
+
+		Entry(nil, "/", "/f", "/f"),
+		Entry(nil, "/d1", "/f", "/f"),
+		Entry(nil, "/d1/", "/f", "/f"),
+		Entry(nil, "/d1/d2", "/f", "/f"),
+
+		Entry(nil, "/", "/d3/f", "/d3/f"),
+		Entry(nil, "/d1", "/d3/f", "/d3/f"),
+		Entry(nil, "/d1/", "/d3/f", "/d3/f"),
+		Entry(nil, "/d1/d2", "/d3/f", "/d3/f"),
+	)
+})
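Reviewer note: a small illustration (not part of the patch) of the presumable use case, resolving data files referenced by a config relative to the config file's own directory. The config path below is hypothetical.

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/vesoft-inc/nebula-importer/v3/pkg/utils"
)

func main() {
	configPath := "/etc/importer/nebula-importer.v3.yaml" // hypothetical path
	base := filepath.Dir(configPath)

	fmt.Println(utils.RelativePathBaseOn(base, "./data/person.csv")) // /etc/importer/data/person.csv
	fmt.Println(utils.RelativePathBaseOn(base, "/tmp/person.csv"))   // absolute input is returned as-is
}
```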
diff --git a/pkg/utils/string.go b/pkg/utils/string.go
index e54a5dbb..8fb4ac39 100644
--- a/pkg/utils/string.go
+++ b/pkg/utils/string.go
@@ -1,5 +1,7 @@
 package utils
 
+import "strings"
+
 func IsInteger(s string) bool {
 	if s == "" {
 		return false
@@ -44,3 +46,21 @@ func IsDigit(b byte) bool {
 func IsHexDigit(b byte) bool {
 	return IsDigit(b) || ('a' <= b && b <= 'f') || ('A' <= b && b <= 'F')
 }
+
+// ConvertIdentifier quotes s as an nGQL identifier: it wraps s in backticks,
+// escaping any backslash or backtick inside, e.g. a`b becomes `a\`b`.
+func ConvertIdentifier(s string) string {
+	const (
+		backslashChar   = '\\'
+		backslashString = string(backslashChar)
+		backtickChar    = '`'
+		backtickString  = string(backtickChar)
+	)
+	if strings.IndexByte(s, backslashChar) >= 0 {
+		s = strings.ReplaceAll(s, backslashString, backslashString+backslashString)
+	}
+	if strings.IndexByte(s, backtickChar) >= 0 {
+		s = strings.ReplaceAll(s, backtickString, backslashString+backtickString)
+	}
+	return backtickString + s + backtickString
+}
diff --git a/pkg/utils/string_test.go b/pkg/utils/string_test.go
new file mode 100644
index 00000000..7daf5b62
--- /dev/null
+++ b/pkg/utils/string_test.go
@@ -0,0 +1,119 @@
+package utils
+
+import (
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+var _ = Describe("string", func() {
+	DescribeTable("IsUnsignedInteger/IsInteger",
+		func(s string, isUnsignedInteger bool) {
+			Expect(IsUnsignedInteger(s)).To(Equal(isUnsignedInteger))
+			Expect(IsInteger(s)).To(Equal(isUnsignedInteger))
+			if len(s) > 0 && (s[0] != '-' && s[0] != '+') {
+				Expect(IsInteger("-" + s)).To(Equal(isUnsignedInteger))
+				Expect(IsInteger("+" + s)).To(Equal(isUnsignedInteger))
+			}
+		},
+		EntryDescription("IsInteger(%[1]q) == %[2]t"),
+		Entry(nil, "", false),
+		Entry(nil, "0", true),
+		Entry(nil, "02", true),
+		Entry(nil, "12", true),
+		Entry(nil, "0x0", true),
+		Entry(nil, "0X0", true),
+		Entry(nil, "0x", false),
+		Entry(nil, "0X", false),
+		Entry(nil, "0123456789", true),
+		Entry(nil, "1234567890", true),
+		Entry(nil, "1234567890a", false),
+		Entry(nil, "a1234567890", false),
+		Entry(nil, "12345a67890", false),
+		Entry(nil, "0X1234567890", true),
+		Entry(nil, "0X1234567890abcdef", true),
+		Entry(nil, "0X1234567890ABCDEF", true),
+		Entry(nil, "0X1A2B3C4D5F6F7890", true),
+		Entry(nil, "0X1A2B3C4D5F6F7890g", false),
+	)
+
+	DescribeTable("IsDigit",
+		func(b byte, isDigit bool) {
+			Expect(IsDigit(b)).To(Equal(isDigit))
+		},
+		EntryDescription("IsDigit('%[1]c') == %[2]t"),
+		Entry(nil, byte(0), false),
+		Entry(nil, byte('0')-1, false),
+		Entry(nil, byte('0'), true),
+		Entry(nil, byte('1'), true),
+		Entry(nil, byte('2'), true),
+		Entry(nil, byte('3'), true),
+		Entry(nil, byte('4'), true),
+		Entry(nil, byte('5'), true),
+		Entry(nil, byte('6'), true),
+		Entry(nil, byte('7'), true),
+		Entry(nil, byte('8'), true),
+		Entry(nil, byte('9'), true),
+		Entry(nil, byte('9')+1, false),
+		Entry(nil, byte('a'), false),
+		Entry(nil, byte('A'), false),
+		Entry(nil, byte('\n'), false),
+	)
+
+	DescribeTable("IsHexDigit",
+		func(b byte, isHexDigit bool) {
+			Expect(IsHexDigit(b)).To(Equal(isHexDigit))
+		},
+		EntryDescription("IsHexDigit('%[1]c') == %[2]t"),
+		Entry(nil, byte(0), false),
+		Entry(nil, byte('0')-1, false),
+		Entry(nil, byte('0'), true),
+		Entry(nil, byte('1'), true),
+		Entry(nil, byte('2'), true),
+		Entry(nil, byte('3'), true),
+		Entry(nil, byte('4'), true),
+		Entry(nil, byte('5'), true),
+		Entry(nil, byte('6'), true),
+		Entry(nil, byte('7'), true),
+		Entry(nil, byte('8'), true),
+		Entry(nil, byte('9'), true),
+		Entry(nil, byte('9')+1, false),
+		Entry(nil, byte('a')-1, false),
+		Entry(nil, byte('a'), true),
+		Entry(nil, byte('b'), true),
+		Entry(nil, byte('c'), true),
+		Entry(nil, byte('d'), true),
+		Entry(nil, byte('e'), true),
+		Entry(nil, byte('f'), true),
+		Entry(nil, byte('f')+1, false),
+		Entry(nil, byte('A')-1, false),
+		Entry(nil, byte('A'), true),
+		Entry(nil, byte('B'), true),
+		Entry(nil, byte('C'), true),
+		Entry(nil, 
byte('D'), true), + Entry(nil, byte('E'), true), + Entry(nil, byte('F'), true), + Entry(nil, byte('F')+1, false), + Entry(nil, byte('\n'), false), + ) + + DescribeTable("ConvertIdentifier", + func(s, expect string) { + Expect(ConvertIdentifier(s)).To(Equal(expect)) + }, + EntryDescription("ConvertIdentifier(%[1]s) = %[2]s"), + Entry(nil, "", "``"), + Entry(nil, "`", "`\\``"), + Entry(nil, "``", "`\\`\\``"), + Entry(nil, "a`b`c", "`a\\`b\\`c`"), + Entry(nil, "`a`b`c", "`\\`a\\`b\\`c`"), + Entry(nil, "a`b`c`", "`a\\`b\\`c\\``"), + Entry(nil, "`a`b`c`", "`\\`a\\`b\\`c\\``"), + Entry(nil, "\\", "`\\\\`"), + Entry(nil, "\\\\", "`\\\\\\\\`"), + Entry(nil, "a\\b\\c", "`a\\\\b\\\\c`"), + Entry(nil, "\\a\\b\\c", "`\\\\a\\\\b\\\\c`"), + Entry(nil, "a\\b\\c\\", "`a\\\\b\\\\c\\\\`"), + Entry(nil, "\\a\\b\\c\\", "`\\\\a\\\\b\\\\c\\\\`"), + Entry(nil, "`\\a\\`b`\\c\\`", "`\\`\\\\a\\\\\\`b\\`\\\\c\\\\\\``"), + ) +}) diff --git a/pkg/utils/testdata/file10 b/pkg/utils/testdata/file10 new file mode 100644 index 00000000..e69de29b diff --git a/pkg/utils/testdata/file11 b/pkg/utils/testdata/file11 new file mode 100644 index 00000000..e69de29b diff --git a/pkg/utils/testdata/file20 b/pkg/utils/testdata/file20 new file mode 100644 index 00000000..e69de29b diff --git a/pkg/utils/utils.go b/pkg/utils/utils.go new file mode 100644 index 00000000..8bbbb758 --- /dev/null +++ b/pkg/utils/utils.go @@ -0,0 +1,2 @@ +// Package utils implements some utility functions. +package utils diff --git a/pkg/utils/utils_suite_test.go b/pkg/utils/utils_suite_test.go new file mode 100644 index 00000000..49d42ce0 --- /dev/null +++ b/pkg/utils/utils_suite_test.go @@ -0,0 +1,13 @@ +package utils + +import ( + "testing" + + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestUtils(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg utils Suite") +} diff --git a/pkg/utils/waitgroupmap.go b/pkg/utils/waitgroupmap.go new file mode 100644 index 00000000..69cd73d1 --- /dev/null +++ b/pkg/utils/waitgroupmap.go @@ -0,0 +1,132 @@ +package utils + +import "sync" + +type WaitGroupMap struct { + mu sync.RWMutex + m map[string]*sync.WaitGroup +} + +func NewWaitGroups() *WaitGroupMap { + return &WaitGroupMap{ + m: make(map[string]*sync.WaitGroup), + } +} + +func (w *WaitGroupMap) Add(delta int, key string) { + wg := w.getOrAddWaitGroup(key) + wg.Add(delta) +} + +func (w *WaitGroupMap) AddMany(delta int, keys ...string) { + switch len(keys) { + case 0: + return + case 1: + w.Add(delta, keys[0]) + return + case 2: + w.Add(delta, keys[0]) + w.Add(delta, keys[1]) + return + } + + for _, key := range keys { + wg := w.getOrAddWaitGroup(key) + wg.Add(delta) + } +} + +func (w *WaitGroupMap) Done(key string) { + w.mu.RLock() + wg := w.m[key] + w.mu.RUnlock() + if wg != nil { + wg.Done() + } +} + +func (w *WaitGroupMap) DoneMany(keys ...string) { //nolint:dupl + switch len(keys) { + case 0: + return + case 1: + w.Done(keys[0]) + return + case 2: + w.Done(keys[0]) + w.Done(keys[1]) + return + } + + wgs := make([]*sync.WaitGroup, 0, len(keys)) + + w.mu.RLock() + for _, key := range keys { + wg := w.m[key] + if wg != nil { + wgs = append(wgs, wg) + } + } + w.mu.RUnlock() + + for _, wg := range wgs { + wg.Done() + } +} + +func (w *WaitGroupMap) Wait(key string) { + w.mu.RLock() + wg := w.m[key] + w.mu.RUnlock() + if wg != nil { + wg.Wait() + } +} + +func (w *WaitGroupMap) WaitMany(keys ...string) { //nolint:dupl + switch len(keys) { + case 0: + return + case 1: + w.Wait(keys[0]) + return + case 2: + w.Wait(keys[0]) + 
w.Wait(keys[1])
+		return
+	}
+
+	wgs := make([]*sync.WaitGroup, 0, len(keys))
+
+	w.mu.RLock()
+	for _, key := range keys {
+		wg := w.m[key]
+		if wg != nil {
+			wgs = append(wgs, wg)
+		}
+	}
+	w.mu.RUnlock()
+
+	for _, wg := range wgs {
+		wg.Wait()
+	}
+}
+
+func (w *WaitGroupMap) getOrAddWaitGroup(key string) *sync.WaitGroup {
+	w.mu.RLock()
+	wg := w.m[key]
+	w.mu.RUnlock()
+
+	if wg == nil {
+		w.mu.Lock()
+		wg = w.m[key]
+		if wg == nil {
+			wg = &sync.WaitGroup{}
+			w.m[key] = wg
+		}
+		w.mu.Unlock()
+	}
+
+	return wg
+}
diff --git a/pkg/utils/waitgroupmap_test.go b/pkg/utils/waitgroupmap_test.go
new file mode 100644
index 00000000..00b467e8
--- /dev/null
+++ b/pkg/utils/waitgroupmap_test.go
@@ -0,0 +1,81 @@
+package utils
+
+import (
+	stderrors "errors"
+	"fmt"
+	"sync"
+	"time"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+var _ = Describe("WaitGroupMap", func() {
+	It("concurrency", func() {
+		wgMap := NewWaitGroups()
+
+		keyNum := 25
+		concurrencyPerKey := 200
+
+		var wgWaitAllKeys sync.WaitGroup
+		wgWaitAllKeys.Add(keyNum)
+
+		finish := make(chan struct{})
+		go func() {
+			wgWaitAllKeys.Wait()
+			close(finish)
+		}()
+
+		for i := 0; i < keyNum; i++ {
+			key := fmt.Sprintf("key%d", i)
+
+			var wgAddKeys sync.WaitGroup
+			wgAddKeys.Add(concurrencyPerKey)
+
+			// add
+			go func(key string) {
+				for i := 0; i < concurrencyPerKey; i++ {
+					go func() {
+						wgMap.Add(1, key)
+						wgMap.AddMany(1)
+						wgMap.AddMany(1, key+"11")
+						wgMap.AddMany(1, key+"21", key+"21")
+						wgMap.AddMany(1, key+"31", key+"32", key+"33")
+						wgAddKeys.Done()
+					}()
+				}
+			}(key)
+
+			// done
+			go func(key string) {
+				wgAddKeys.Wait()
+				for i := 0; i < concurrencyPerKey; i++ {
+					go func() {
+						wgMap.Done(key)
+						wgMap.DoneMany()
+						wgMap.DoneMany(key + "11")
+						wgMap.DoneMany(key+"21", key+"21")
+						wgMap.DoneMany(key+"31", key+"32", key+"33")
+					}()
+				}
+			}(key)
+
+			// wait
+			go func(key string) {
+				wgAddKeys.Wait()
+				wgMap.Wait(key)
+				wgMap.WaitMany()
+				wgMap.WaitMany(key + "11")
+				wgMap.WaitMany(key+"21", key+"21")
+				wgMap.WaitMany(key+"31", key+"32", key+"33")
+				wgWaitAllKeys.Done()
+			}(key)
+		}
+
+		select {
+		case <-finish:
+		case <-time.After(time.Second * 10):
+			Expect(stderrors.New("timeout")).NotTo(HaveOccurred())
+		}
+	})
+})
diff --git a/pkg/version/version.go b/pkg/version/version.go
index 0670a40a..071dfa88 100644
--- a/pkg/version/version.go
+++ b/pkg/version/version.go
@@ -1,5 +1,52 @@
 package version
 
-var GitHash string
-var GoVersion string
-var Tag string
+import (
+	"fmt"
+	"runtime"
+)
+
+const undefined = ""
+
+var (
+	buildVersion    = undefined
+	buildCommit     = undefined
+	buildCommitDate = undefined
+	buildDate       = undefined
+)
+
+type Version struct {
+	Version    string
+	Commit     string
+	CommitDate string
+	BuildDate  string
+	GoVersion  string
+	Platform   string
+}
+
+func GetVersion() *Version {
+	return &Version{
+		Version:    buildVersion,
+		Commit:     buildCommit,
+		CommitDate: buildCommitDate,
+		BuildDate:  buildDate,
+		GoVersion:  runtime.Version(),
+		Platform:   fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH),
+	}
+}
+
+func (v *Version) String() string {
+	return fmt.Sprintf(`Version: %s
+Commit: %s
+CommitDate: %s
+BuildDate: %s
+GoVersion: %s
+Platform: %s
+`,
+		v.Version,
+		v.Commit,
+		v.CommitDate,
+		v.BuildDate,
+		v.GoVersion,
+		v.Platform,
+	)
+}
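Reviewer note: the build* variables are package-private and empty by default, so they are presumably injected at link time with something like `go build -ldflags "-X github.com/vesoft-inc/nebula-importer/v3/pkg/version.buildVersion=v4.0.0"` (the real flags live in the build scripts; this invocation is an assumption). A minimal consumer would look like:

```go
package main

import (
	"fmt"

	"github.com/vesoft-inc/nebula-importer/v3/pkg/version"
)

func main() {
	v := version.GetVersion()
	fmt.Print(v) // *Version implements fmt.Stringer; fields print empty unless set via -ldflags -X
}
```

diff --git a/pkg/version/version_suite_test.go b/pkg/version/version_suite_test.go
new file mode 100644
index 00000000..4bdcb8dc
--- /dev/null
+++ b/pkg/version/version_suite_test.go
@@ -0,0 +1,13 @@
+package version
+
+import (
+	"testing"
+
+	. 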
"github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +func TestVersion(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Pkg version Suite") +} diff --git a/pkg/version/version_test.go b/pkg/version/version_test.go new file mode 100644 index 00000000..9e70aa00 --- /dev/null +++ b/pkg/version/version_test.go @@ -0,0 +1,54 @@ +package version + +import ( + "fmt" + "runtime" + + "github.com/agiledragon/gomonkey/v2" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" +) + +var _ = Describe("Version", func() { + var v *Version + + BeforeEach(func() { + buildVersionPatches := gomonkey.ApplyGlobalVar(&buildVersion, "buildVersion") + defer buildVersionPatches.Reset() + buildCommitPatches := gomonkey.ApplyGlobalVar(&buildCommit, "buildCommit") + defer buildCommitPatches.Reset() + buildCommitDatePatches := gomonkey.ApplyGlobalVar(&buildCommitDate, "buildCommitDate") + defer buildCommitDatePatches.Reset() + buildDatePatches := gomonkey.ApplyGlobalVar(&buildDate, "buildDate") + defer buildDatePatches.Reset() + + v = GetVersion() + }) + Describe("GetVersion", func() { + It("should be", func() { + Expect(v).Should(Equal(&Version{ + Version: "buildVersion", + Commit: "buildCommit", + CommitDate: "buildCommitDate", + BuildDate: "buildDate", + GoVersion: runtime.Version(), + Platform: fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH), + })) + }) + }) + + Describe(".String", func() { + It("should be", func() { + Expect(v.String()).Should(Equal( + fmt.Sprintf(`Version: buildVersion +Commit: buildCommit +CommitDate: buildCommitDate +BuildDate: buildDate +GoVersion: %s +Platform: %s +`, runtime.Version(), fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH)), + )) + }) + }) + +}) diff --git a/pkg/web/httpserver.go b/pkg/web/httpserver.go deleted file mode 100644 index a7e9d1dc..00000000 --- a/pkg/web/httpserver.go +++ /dev/null @@ -1,237 +0,0 @@ -package web - -import ( - "bytes" - "encoding/json" - "fmt" - "net/http" - "strings" - "sync" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/cmd" - "github.com/vesoft-inc/nebula-importer/v3/pkg/config" - "github.com/vesoft-inc/nebula-importer/v3/pkg/errors" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type WebServer struct { - Port int - Callback string - server *http.Server - taskMgr *taskMgr - mux sync.Mutex - RunnerLogger logger.Logger -} - -var taskId uint64 = 0 - -func (w *WebServer) newTaskId() string { - w.mux.Lock() - defer w.mux.Unlock() - tid := taskId - taskId++ - return fmt.Sprintf("%d", tid) -} - -func (w *WebServer) Start() error { - logger.SetLogger(w.RunnerLogger) - m := http.NewServeMux() - w.taskMgr = newTaskMgr() - - m.HandleFunc("/submit", func(resp http.ResponseWriter, req *http.Request) { - if req.Method == "POST" { - w.submit(resp, req) - } else { - w.badRequest(resp, "HTTP method must be POST") - } - }) - - m.HandleFunc("/stop", func(resp http.ResponseWriter, req *http.Request) { - if req.Method == "PUT" { - w.stop(resp, req) - } else { - w.badRequest(resp, "HTTP method must be PUT") - } - }) - - m.HandleFunc("/tasks", func(resp http.ResponseWriter, req *http.Request) { - if req.Method == "GET" { - keys := w.taskMgr.keys() - var tasks struct { - Tasks []string `json:"tasks"` - } - tasks.Tasks = keys - if b, err := json.Marshal(tasks); err != nil { - w.badRequest(resp, err.Error()) - } else { - resp.WriteHeader(http.StatusOK) - if _, err = resp.Write(b); err != nil { - logger.Log.Error(err) - } - } - } else { - w.badRequest(resp, "HTTP method must be GET") - } - }) - - w.server = &http.Server{ - 
Addr: fmt.Sprintf(":%d", w.Port), - Handler: m, - } - - logger.Log.Infof("Starting http server on %d", w.Port) - return w.listenAndServe() -} - -func (w *WebServer) listenAndServe() error { - if err := w.server.ListenAndServe(); err != nil && err != http.ErrServerClosed { - logger.Log.Error(err) - return err - } - return nil -} - -type errResult struct { - ErrCode int `json:"errCode"` - ErrMsg string `json:"errMsg"` -} - -type task struct { - errResult - TaskId string `json:"taskId"` -} - -type respBody struct { - task - FailedRows int64 `json:"failedRows"` -} - -func (w *WebServer) callback(body *respBody) { - if b, err := json.Marshal(*body); err != nil { - logger.Log.Error(err) - } else { - _, err := http.Post(w.Callback, "application/json", bytes.NewBuffer(b)) - if err != nil { - logger.Log.Error(err) - } - } -} - -func (w *WebServer) stopRunner(taskId string) { - runner := w.taskMgr.get(taskId) - if runner == nil { - return - } - - for _, r := range runner.Readers { - r.Stop() - } - - logger.Log.Infof("Task %s stopped.", taskId) -} - -func (w *WebServer) stop(resp http.ResponseWriter, req *http.Request) { - if req.Body == nil { - w.badRequest(resp, "nil request body") - return - } - defer req.Body.Close() - - var task task - if err := json.NewDecoder(req.Body).Decode(&task); err != nil { - w.badRequest(resp, err.Error()) - return - } - - if strings.ToLower(task.TaskId) == "all" { - for _, k := range w.taskMgr.keys() { - w.stopRunner(k) - } - } else { - w.stopRunner(task.TaskId) - } - - resp.WriteHeader(http.StatusOK) - if _, err := fmt.Fprintln(resp, "OK"); err != nil { - logger.Log.Error(err) - } -} - -func (w *WebServer) badRequest(resp http.ResponseWriter, msg string) { - resp.WriteHeader(http.StatusOK) - t := errResult{ - ErrCode: 1, - ErrMsg: msg, - } - - if b, err := json.Marshal(t); err != nil { - logger.Log.Error(err) - } else { - resp.WriteHeader(http.StatusOK) - if _, err = resp.Write(b); err != nil { - logger.Log.Error(err) - } - } -} - -func (w *WebServer) submit(resp http.ResponseWriter, req *http.Request) { - if req.Body == nil { - w.badRequest(resp, "nil request body") - return - } - defer req.Body.Close() - - var conf config.YAMLConfig - if err := json.NewDecoder(req.Body).Decode(&conf); err != nil { - w.badRequest(resp, err.Error()) - return - } - - if err := conf.ValidateAndReset(""); err != nil { - w.badRequest(resp, err.Error()) - return - } - - runner := &cmd.Runner{} - tid := w.newTaskId() - w.taskMgr.put(tid, runner) - t := task{ - errResult: errResult{ErrCode: 0}, - TaskId: tid, - } - - go func(tid string) { - runner.Run(&conf) - var body respBody - rerr := runner.Error() - if rerr != nil { - err, _ := rerr.(errors.ImporterError) - logger.Log.Error(err) - body = respBody{ - task: task{ - errResult: errResult{ - ErrCode: err.ErrCode, - ErrMsg: err.ErrMsg.Error(), - }, - TaskId: tid, - }, - } - } else { - body = respBody{ - task: t, - FailedRows: runner.NumFailed, - } - } - w.callback(&body) - w.taskMgr.del(tid) - }(tid) - - if b, err := json.Marshal(t); err != nil { - w.badRequest(resp, err.Error()) - } else { - resp.WriteHeader(http.StatusOK) - if _, err := resp.Write(b); err != nil { - logger.Log.Error(err) - } - } -} diff --git a/pkg/web/taskmgr.go b/pkg/web/taskmgr.go deleted file mode 100644 index b1f98be2..00000000 --- a/pkg/web/taskmgr.go +++ /dev/null @@ -1,52 +0,0 @@ -package web - -import ( - "sync" - - "github.com/vesoft-inc/nebula-importer/v3/pkg/cmd" - "github.com/vesoft-inc/nebula-importer/v3/pkg/logger" -) - -type taskMgr struct { - tasks 
map[string]*cmd.Runner - mux sync.Mutex -} - -func newTaskMgr() *taskMgr { - return &taskMgr{ - tasks: make(map[string]*cmd.Runner), - } -} - -func (m *taskMgr) keys() []string { - m.mux.Lock() - defer m.mux.Unlock() - var keys []string - for k := range m.tasks { - keys = append(keys, k) - } - return keys -} - -func (m *taskMgr) put(k string, r *cmd.Runner) { - m.mux.Lock() - defer m.mux.Unlock() - m.tasks[k] = r -} - -func (m *taskMgr) get(k string) *cmd.Runner { - m.mux.Lock() - defer m.mux.Unlock() - if v, ok := m.tasks[k]; !ok { - logger.Log.Errorf("Fail to get %s value from task manager", k) - return nil - } else { - return v - } -} - -func (m *taskMgr) del(k string) { - m.mux.Lock() - defer m.mux.Unlock() - delete(m.tasks, k) -}