From e31f3c152afaffbfddd1bbc53a40129f8ac68a5b Mon Sep 17 00:00:00 2001 From: Vee Zhang Date: Thu, 18 May 2023 19:15:19 +0800 Subject: [PATCH] enha: integration testing --- .github/workflows/build.yml | 16 +- .goreleaser.yaml | 57 +- Dockerfile | 23 +- Makefile | 3 + README.md | 55 +- docs/configuration-reference.md | 12 +- examples/oss/oss.v3.yaml | 8 +- examples/s3/s3.v3.yaml | 8 +- go.mod | 8 +- go.sum | 18 +- integration-testing/Dockerfile | 16 + integration-testing/docker-compose.yaml | 142 +++++ .../testdata/basic/basic.int.v3.yaml | 147 +++++ .../testdata/basic/basic.string.v3.yaml | 96 ++++ integration-testing/testdata/basic/knows.csv | 9 + .../testdata/basic/logs/.gitkeep | 0 integration-testing/testdata/basic/person.csv | 13 + .../testdata/cases-int/cases.int.v3.yaml | 65 +++ .../testdata/cases-int/course.csv | 7 + .../testdata/cases-string/basic_type_test.csv | 12 + .../cases-string/cases.string.v3.yaml | 543 ++++++++++++++++++ .../testdata/cases-string/choose.csv | 4 + .../cases-string/course-lazy-quotes.csv | 7 + .../cases-string/course-with-header.csv | 5 + .../testdata/cases-string/course.csv | 7 + .../testdata/cases-string/date_test.csv | 3 + .../cases-string/follow-delimiter.csv | 3 + .../testdata/cases-string/follow.csv | 4 + .../testdata/cases-string/geography_test.csv | 4 + .../testdata/cases-string/glob-follow-1.csv | 3 + .../testdata/cases-string/glob-follow-2.csv | 1 + .../testdata/cases-string/student.csv | 3 + pkg/cmd/nebula-importer.go | 13 +- pkg/logger/logger.go | 2 +- pkg/logger/nop.go | 2 +- pkg/logger/zap.go | 8 +- pkg/source/oss.go | 12 +- pkg/source/oss_test.go | 60 +- pkg/source/s3.go | 18 +- pkg/source/s3_test.go | 28 +- pkg/spec/v3/value.go | 2 + pkg/spec/v3/value_test.go | 1 + pkg/stats/stats.go | 4 + pkg/stats/stats_test.go | 1 + 44 files changed, 1301 insertions(+), 152 deletions(-) create mode 100644 integration-testing/Dockerfile create mode 100644 integration-testing/docker-compose.yaml create mode 100644 integration-testing/testdata/basic/basic.int.v3.yaml create mode 100644 integration-testing/testdata/basic/basic.string.v3.yaml create mode 100644 integration-testing/testdata/basic/knows.csv create mode 100644 integration-testing/testdata/basic/logs/.gitkeep create mode 100644 integration-testing/testdata/basic/person.csv create mode 100644 integration-testing/testdata/cases-int/cases.int.v3.yaml create mode 100644 integration-testing/testdata/cases-int/course.csv create mode 100644 integration-testing/testdata/cases-string/basic_type_test.csv create mode 100644 integration-testing/testdata/cases-string/cases.string.v3.yaml create mode 100644 integration-testing/testdata/cases-string/choose.csv create mode 100644 integration-testing/testdata/cases-string/course-lazy-quotes.csv create mode 100644 integration-testing/testdata/cases-string/course-with-header.csv create mode 100644 integration-testing/testdata/cases-string/course.csv create mode 100644 integration-testing/testdata/cases-string/date_test.csv create mode 100644 integration-testing/testdata/cases-string/follow-delimiter.csv create mode 100644 integration-testing/testdata/cases-string/follow.csv create mode 100644 integration-testing/testdata/cases-string/geography_test.csv create mode 100644 integration-testing/testdata/cases-string/glob-follow-1.csv create mode 100644 integration-testing/testdata/cases-string/glob-follow-2.csv create mode 100644 integration-testing/testdata/cases-string/student.csv diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 
ae8f0eba..a18cff58 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,7 +8,7 @@ on: branches: - 'master' tags: - - 'v*' + - 'v[0-9]+.[0-9]+.[0-9]+*' permissions: contents: write @@ -27,22 +27,24 @@ jobs: - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: 1.19 - cache: true - run: | go env -w GOFLAGS=-buildvcs=false git config --global --add safe.directory $(pwd) + - run: | + curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose + chmod +x /usr/local/bin/docker-compose - run: make lint - run: make test + - run: make test-it # integration-testing - uses: codecov/codecov-action@v2 - uses: docker/login-action@v1 if: success() && startsWith(github.ref, 'refs/tags/') with: - registry: ${{ secrets.HARBOR_REGISTRY }} - username: ${{ secrets.HARBOR_USERNAME }} - password: ${{ secrets.HARBOR_PASSWORD }} + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} - uses: goreleaser/goreleaser-action@v2 if: success() && startsWith(github.ref, 'refs/tags/') with: @@ -50,4 +52,6 @@ jobs: distribution: goreleaser args: release --rm-dist env: + GORELEASER_CURRENT_TAG: ${{ github.ref_name }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DOCKER_IMAGE_REPO: vesoft/ diff --git a/.goreleaser.yaml b/.goreleaser.yaml index ca9d63ce..6ad03bb5 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -1,7 +1,7 @@ project_name: nebula-importer release: - name_template: "Nebula Importer {{.Version}}" + name_template: "NebulaGraph Importer {{.Version}}" before: hooks: @@ -33,7 +33,9 @@ builds: - "7" archives: - - name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + - id: archives + name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + wrap_in_directory: true replacements: darwin: Darwin linux: Linux @@ -41,17 +43,28 @@ archives: 386: i386 amd64: x86_64 files: - - README.md + - none* # https://goreleaser.com/customization/archive/?h=archives#packaging-only-the-binaries format_overrides: - goos: windows format: zip + - id: binary + name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + format: binary + replacements: + darwin: Darwin + linux: Linux + windows: Windows + 386: i386 + amd64: x86_64 + files: + - none* # https://goreleaser.com/customization/archive/?h=archives#packaging-only-the-binaries dockers: - &dockers image_templates: - - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-amd64" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-amd64" use: buildx - dockerfile: Dockerfile.goreleaser + dockerfile: Dockerfile build_flag_templates: - "--platform=linux/amd64" - "--pull" @@ -64,7 +77,7 @@ dockers: - "--label=org.opencontainers.image.revision={{ .FullCommit }}" - <<: *dockers image_templates: - - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-armv7" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-armv7" goarch: arm goarm: 7 build_flag_templates: @@ -79,7 +92,7 @@ dockers: - "--label=org.opencontainers.image.revision={{ .FullCommit }}" - <<: *dockers image_templates: - - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-arm64v8" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-arm64v8" goarch: arm64 build_flag_templates: - "--platform=linux/arm64/v8" @@ 
-93,19 +106,37 @@ dockers: - "--label=org.opencontainers.image.revision={{ .FullCommit }}" docker_manifests: - &docker_manifests - name_template: "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:latest" + name_template: "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}" + image_templates: + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-amd64" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-armv7" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-arm64v8" + - <<: *docker_manifests + name_template: "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:v{{ .Major }}" + skip_push: auto image_templates: - - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-amd64" - - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-armv7" - - "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}-arm64v8" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-amd64" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-armv7" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-arm64v8" - <<: *docker_manifests - name_template: "reg.vesoft-inc.com/vesoft-ent/{{ .ProjectName }}:{{ .Version }}" + name_template: "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:v{{ .Major }}.{{ .Minor }}" + skip_push: auto + image_templates: + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-amd64" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-armv7" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-arm64v8" + - <<: *docker_manifests + name_template: "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:latest" + image_templates: + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-amd64" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-armv7" + - "{{ .Env.DOCKER_IMAGE_REPO }}{{ .ProjectName }}:{{ .Tag }}-arm64v8" nfpms: - file_name_template: '{{ .ConventionalFileName }}' id: packages homepage: https://github.com/vesoft-inc/nebula-importer - description: Nebula Importer. + description: NebulaGraph Importer. maintainer: Vee Zhang vendor: Vesoft Inc. bindir: /usr/bin diff --git a/Dockerfile b/Dockerfile index ce77cfb8..3604012c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,24 +1,7 @@ -FROM reg.vesoft-inc.com/ci/golang:1.18-alpine AS builder - -LABEL stage=gobuilder - -ENV CGO_ENABLED 0 -ENV GOOS linux -WORKDIR /build/zero - -ADD go.mod . -ADD go.sum . -COPY pkg pkg -COPY cmd cmd -RUN go mod download - -RUN go build -ldflags="-s -w" -o /usr/bin/nebula-importer ./cmd/nebula-importer - -FROM reg.vesoft-inc.com/ci/alpine +FROM reg.vesoft-inc.com/proxy/library/alpine RUN apk update --no-cache && apk add --no-cache ca-certificates tzdata -ENV TZ Asia/Shanghai -COPY --from=builder /usr/bin/nebula-importer /usr/bin/nebula-importer +ADD nebula-importer /usr/local/bin/nebula-importer -ENTRYPOINT ["/usr/bin/nebula-importer"] +ENTRYPOINT ["/usr/local/bin/nebula-importer"] \ No newline at end of file diff --git a/Makefile b/Makefile index 469a0a1c..db1d8882 100644 --- a/Makefile +++ b/Makefile @@ -44,6 +44,9 @@ build: test: go test -gcflags=all="-l" -race -coverprofile=coverage.txt -covermode=atomic ./pkg/... +test-it: # integration-testing + docker-compose -f integration-testing/docker-compose.yaml up --build --exit-code-from importer + docker-build: docker build -t "${DOCKER_REPO}/nebula-importer:${IMAGE_TAG}" -f Dockerfile . 
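Note: the new `test-it` target wraps the docker-compose run used by the CI job above. A minimal sketch of exercising it locally, assuming Docker and a standalone `docker-compose` (v1) binary are installed as in the workflow; the final cleanup command is an assumption and is not part of the Makefile:

```shell
# Lint and unit tests, in the same order as the CI job
make lint
make test

# Build the importer image and run the integration tests against a throwaway
# NebulaGraph cluster; --exit-code-from importer propagates the importer's exit code
make test-it
# which is equivalent to:
# docker-compose -f integration-testing/docker-compose.yaml up --build --exit-code-from importer

# Assumed cleanup step: stop the services and remove their volumes
docker-compose -f integration-testing/docker-compose.yaml down -v
```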
diff --git a/README.md b/README.md index 17f55b40..aada906a 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,9 @@ [![GolangCI](https://golangci.com/badges/github.com/vesoft-inc/nebula-importer.svg)](https://golangci.com/r/github.com/vesoft-inc/nebula-importer) [![GoDoc](https://godoc.org/github.com/vesoft-inc/nebula-importer?status.svg)](https://godoc.org/github.com/vesoft-inc/nebula-importer) -# What is Nebula Importer? +# What is NebulaGraph Importer? -**Nebula Importer** is a tool to import data into [NebulaGraph](https://github.com/vesoft-inc/nebula). +**NebulaGraph Importer** is a tool to import data into [NebulaGraph](https://github.com/vesoft-inc/nebula). ## Features @@ -28,6 +28,7 @@ Download the packages on the [Releases page](https://github.com/vesoft-inc/nebul You can choose according to your needs; the following installation packages are supported: * binary +* archives * apk * deb * rpm @@ -41,8 +42,17 @@ $ go install github.com/vesoft-inc/nebula-importer/cmd/nebula-importer@latest ### From docker ```shell -$ docker pull vesoft/nebula-importer -$ docker run -it -v :/config.yaml --rm vesoft/nebula-importer -c /config.yaml +$ docker pull vesoft/nebula-importer:<version> +$ docker run --rm -ti \ + --network=host \ + -v <config_file>:<config_file> \ + -v <data_dir>:<data_dir> \ + vesoft/nebula-importer:<version> + --config <config_file> + +# config_file: the absolute path to the configuration file. +# data_dir: the absolute path to the data directory; ignored if the source is not a local file. +# version: the version of NebulaGraph Importer. ``` ### From Source Code @@ -57,12 +67,12 @@ You can find a binary named `nebula-importer` in the `bin` directory. ## Configuration Instructions -`Nebula Importer`'s configuration file is in YAML format. You can find some examples in [examples](examples/). +`NebulaGraph Importer`'s configuration file is in YAML format. You can find some examples in [examples](examples/). Configuration options are divided into four groups: * `client` is configuration options related to the NebulaGraph connection client. -* `manager` is global control configuration options related to Nebula Importer. +* `manager` is global control configuration options related to NebulaGraph Importer. * `log` is configuration options related to printing logs. * `sources` is the data source configuration items. @@ -100,14 +110,19 @@ client: hooks: before: - statements: - - statements1 - - statements2 - wait: 10s + - UPDATE CONFIGS storage:wal_ttl=3600; + - UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = true }; - statements: - - statements3 + - | + DROP SPACE IF EXISTS basic_int_examples; + CREATE SPACE IF NOT EXISTS basic_int_examples(partition_num=5, replica_factor=1, vid_type=int); + USE basic_int_examples; + wait: 10s after: - statements: - - statements4 + - | + UPDATE CONFIGS storage:wal_ttl=86400; + UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = false }; ``` * `manager.spaceName`: **Required**. Specifies which space the data is imported into. @@ -132,7 +147,7 @@ log: - logs/nebula-importer.log ``` -* `log.level`: **Optional**. Specifies the log level. The default value is `INFO`. +* `log.level`: **Optional**. Specifies the log level; optional values are `DEBUG`, `INFO`, `WARN`, `ERROR`, `PANIC` or `FATAL`. The default value is `INFO`. * `log.console`: **Optional**. Specifies whether to print logs to the console. The default value is `true`. * `log.files`: **Optional**. Specifies which files to print logs to.
@@ -168,16 +183,16 @@ s3: region: bucket: key: - accessKey: - secretKey: + accessKeyID: + accessKeySecret: ``` * `endpoint`: **Optional**. The endpoint of s3 service, can be omitted if using aws s3. * `region`: **Required**. The region of s3 service. * `bucket`: **Required**. The bucket of file in s3 service. * `key`: **Required**. The object key of file in s3 service. -* `accessKey`: **Optional**. The access key of s3 service. If it is public data, no need to configure. -* `secretKey`: **Optional**. The secret key of s3 service. If it is public data, no need to configure. +* `accessKeyID`: **Optional**. The `Access Key ID` of s3 service. If it is public data, no need to configure. +* `accessKeySecret`: **Optional**. The `Access Key Secret` of s3 service. If it is public data, no need to configure. #### oss @@ -188,15 +203,15 @@ oss: endpoint: bucket: key: - accessKey: - secretKey: + accessKeyID: + accessKeySecret: ``` * `endpoint`: **Required**. The endpoint of oss service. * `bucket`: **Required**. The bucket of file in oss service. * `key`: **Required**. The object key of file in oss service. -* `accessKey`: **Required**. The access key of oss service. -* `secretKey`: **Required**. The secret key of oss service. +* `accessKeyID`: **Required**. The `Access Key ID` of oss service. +* `accessKeySecret`: **Required**. The `Access Key Secret` of oss service. #### ftp diff --git a/docs/configuration-reference.md b/docs/configuration-reference.md index 77e1c6b0..6c8ed6a8 100644 --- a/docs/configuration-reference.md +++ b/docs/configuration-reference.md @@ -1,4 +1,4 @@ -# Nebula Importer Configuration Description +# NebulaGraph Importer Configuration Description | options | description | default | | :-- | :-- | :-- | @@ -12,7 +12,7 @@ | client.retry | The failed retrying times to execute nGQL queries in NebulaGraph client. | 3 | | client.retryInitialInterval | The initialization interval retrying. | 1s | | | | | -| manager | The global control configuration options related to Nebula Importer. | - | +| manager | The global control configuration options related to NebulaGraph Importer. | - | | manager.spaceName | Specifies which space the data is imported into. | - | | manager.batch | Specifies the batch size for all sources of the inserted data. | 128 | | manager.readerConcurrency | Specifies the concurrency of reader to read from sources. | 50 | @@ -36,13 +36,13 @@ | sources[].s3.region | The region of s3 service. | - | | sources[].s3.bucket | The bucket of file in s3 service. | - | | sources[].s3.key | The object key of file in s3 service. | - | -| sources[].s3.accessKey | The access key of s3 service. | - | -| sources[].s3.secretKey | The secret key of s3 service. | - | +| sources[].s3.accessKeyID | The `Access Key ID` of s3 service. | - | +| sources[].s3.accessKeySecret | The `Access Key Secret` of s3 service. | - | | sources[].oss.endpoint | The endpoint of oss service. | - | | sources[].oss.bucket | The bucket of file in oss service. | - | | sources[].oss.key | The object key of file in oss service. | - | -| sources[].oss.accessKey | The access key of oss service. | - | -| sources[].oss.secretKey | The secret key of oss service. | - | +| sources[].oss.accessKeyID | The `Access Key ID` of oss service. | - | +| sources[].oss.accessKeySecret | The `Access Key Secret` of oss service. | - | | sources[].ftp.host | The host of ftp service. | - | | sources[].ftp.host | The port of ftp service. | - | | sources[].ftp.user | The user of ftp service. 
| - | diff --git a/examples/oss/oss.v3.yaml b/examples/oss/oss.v3.yaml index c362057e..be835aca 100644 --- a/examples/oss/oss.v3.yaml +++ b/examples/oss/oss.v3.yaml @@ -32,10 +32,10 @@ log: sources: - oss: endpoint: https://oss-cn-hangzhou.aliyuncs.com - bucket: bucketName - key: objectKey - accessKey: accessKey - secretKey: secretKey + bucket: bucket-name + key: object-key + accessKeyID: "Access Key ID" + accessKeySecret: "Access Key Secret" csv: delimiter: "\t" tags: diff --git a/examples/s3/s3.v3.yaml b/examples/s3/s3.v3.yaml index 0b6244d6..0a447eeb 100644 --- a/examples/s3/s3.v3.yaml +++ b/examples/s3/s3.v3.yaml @@ -34,8 +34,8 @@ sources: region: us-east-1 bucket: gdelt-open-data key: events/20190918.export.csv - # accessKey: "" - # secretKey: "" + # accessKeyID: "" + # accessKeySecret: "" csv: delimiter: "\t" tags: @@ -52,8 +52,8 @@ sources: region: us-east-1 bucket: gdelt-open-data key: events/20190918.export.csv - accessKey: "accessKey" - secretKey: "secretKey" + accessKeyID: "Access Key ID" + accessKeySecret: "Access Key Secret" csv: delimiter: "\t" tags: diff --git a/go.mod b/go.mod index 213fb123..c2dbe7ad 100644 --- a/go.mod +++ b/go.mod @@ -20,7 +20,7 @@ require ( github.com/spf13/afero v1.9.3 github.com/spf13/cobra v1.6.1 github.com/valyala/bytebufferpool v1.0.0 - github.com/vesoft-inc/nebula-go/v3 v3.3.1 + github.com/vesoft-inc/nebula-go/v3 v3.5.0 go.uber.org/zap v1.23.0 golang.org/x/crypto v0.5.0 gopkg.in/yaml.v3 v3.0.1 @@ -47,9 +47,9 @@ require ( github.com/spf13/pflag v1.0.5 // indirect go.uber.org/atomic v1.7.0 // indirect go.uber.org/multierr v1.6.0 // indirect - golang.org/x/net v0.5.0 // indirect - golang.org/x/sys v0.4.0 // indirect - golang.org/x/text v0.6.0 // indirect + golang.org/x/net v0.10.0 // indirect + golang.org/x/sys v0.8.0 // indirect + golang.org/x/text v0.9.0 // indirect golang.org/x/time v0.3.0 // indirect google.golang.org/protobuf v1.28.1 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect diff --git a/go.sum b/go.sum index 5fa94804..810dc876 100644 --- a/go.sum +++ b/go.sum @@ -226,8 +226,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/vesoft-inc/nebula-go/v3 v3.3.1 h1:5DxUxswEQvK9gkK6Y/X4fhX+bmIeHIJrn+b2q7tE3HM= -github.com/vesoft-inc/nebula-go/v3 v3.3.1/go.mod h1:+sXv05jYQBARdTbTcIEsWVXCnF/6ttOlDK35xQ6m54s= +github.com/vesoft-inc/nebula-go/v3 v3.5.0 h1:2ZSkoBxtIfs15AXJXqrAPDPd0Z9HrzKR7YKXPqlJcR0= +github.com/vesoft-inc/nebula-go/v3 v3.5.0/go.mod h1:+sXv05jYQBARdTbTcIEsWVXCnF/6ttOlDK35xQ6m54s= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -329,8 +329,8 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.5.0 h1:GyT4nK/YDHSqa1c4753ouYCDajOYKTja9Xb/OHtgvSw= 
-golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= +golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -393,12 +393,12 @@ golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.4.0 h1:Zr2JFtRQNX3BCZ8YtxRE9hNJYC8J6I1MVbMg6owUp18= -golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.4.0 h1:O7UWfv5+A2qiuulQk30kVinPoMtoIPeVaKLEgLpVkvg= +golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -408,8 +408,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.6.0 h1:3XmdazWV+ubf7QgHSTWeykHOci5oeekaGJBLkrkaw4k= -golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/integration-testing/Dockerfile b/integration-testing/Dockerfile new file mode 100644 index 00000000..a857c74f --- /dev/null +++ b/integration-testing/Dockerfile @@ -0,0 +1,16 @@ +FROM reg.vesoft-inc.com/proxy/library/golang:1.19-alpine + +LABEL stage=gobuilder + +ENV CGO_ENABLED 0 +ENV GOOS linux +WORKDIR /build/zero + +RUN go install github.com/vesoft-inc/nebula-console@latest + +ADD . . 
+ +RUN go mod download +RUN go build -ldflags="-s -w" -o /usr/bin/nebula-importer ./cmd/nebula-importer + +ENTRYPOINT ["sh", "-c"] diff --git a/integration-testing/docker-compose.yaml b/integration-testing/docker-compose.yaml new file mode 100644 index 00000000..933fe446 --- /dev/null +++ b/integration-testing/docker-compose.yaml @@ -0,0 +1,142 @@ +version: '3' +services: + metad: + image: vesoft/nebula-metad:nightly + environment: + USER: root + TZ: UTC + command: + - --meta_server_addrs=metad:9559 + - --local_ip=metad + - --ws_ip=metad + - --port=9559 + - --ws_http_port=19559 + healthcheck: + test: ["CMD", "curl", "-f", "http://metad:19559/status"] + interval: 15s + timeout: 10s + retries: 3 + start_period: 20s + networks: + nebula-net: + restart: on-failure + cap_add: + - SYS_PTRACE + + graphd1: + image: vesoft/nebula-graphd:nightly + environment: + USER: root + TZ: UTC + command: + - --meta_server_addrs=metad:9559 + - --local_ip=graphd1 + - --ws_ip=graphd1 + - --port=9669 + - --ws_http_port=19669 + - --local_config=false + depends_on: + - metad + - storaged + healthcheck: + test: ["CMD", "curl", "-f", "http://graphd1:19669/status"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 20s + networks: + nebula-net: + restart: on-failure + cap_add: + - SYS_PTRACE + + graphd2: + image: vesoft/nebula-graphd:nightly + environment: + USER: root + TZ: UTC + command: + - --meta_server_addrs=metad:9559 + - --local_ip=graphd1 + - --ws_ip=graphd2 + - --port=9669 + - --ws_http_port=19669 + - --enable_authorize=true + - --local_config=false + depends_on: + - metad + - storaged + healthcheck: + test: ["CMD", "curl", "-f", "http://graphd2:19669/status"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 20s + networks: + nebula-net: + restart: on-failure + cap_add: + - SYS_PTRACE + + storaged: + image: vesoft/nebula-storaged:nightly + environment: + USER: root + TZ: UTC + command: + - --meta_server_addrs=metad:9559 + - --local_ip=storaged + - --ws_ip=storaged + - --port=9779 + - --ws_http_port=19779 + - --local_config=false + depends_on: + - metad + healthcheck: + test: ["CMD", "curl", "-f", "http://storaged:19779/status"] + interval: 15s + timeout: 10s + retries: 3 + start_period: 20s + networks: + nebula-net: + restart: on-failure + cap_add: + - SYS_PTRACE + + importer: + build: + context: ../ + dockerfile: ./integration-testing/Dockerfile + depends_on: + - graphd1 + - graphd2 + - metad + - storaged + networks: + nebula-net: + entrypoint: "" + command: + - sh + - -c + - | + for i in `seq 1 60`; do + echo "Adding hosts..." + nebula-console -addr graphd1 -port 9669 -u root -p nebula -e 'ADD HOSTS "storaged":9779' + if [[ $$? == 0 ]];then + echo "Add hosts succeed" + break + fi + sleep 1 + echo "retry to add hosts." 
+ done + + set -e + + nebula-importer -c ./integration-testing/testdata/basic/basic.int.v3.yaml + nebula-importer -c ./integration-testing/testdata/basic/basic.string.v3.yaml + nebula-importer -c ./integration-testing/testdata/cases-string/cases.string.v3.yaml + nebula-importer -c ./integration-testing/testdata/cases-int/cases.int.v3.yaml + +networks: + nebula-net: \ No newline at end of file diff --git a/integration-testing/testdata/basic/basic.int.v3.yaml b/integration-testing/testdata/basic/basic.int.v3.yaml new file mode 100644 index 00000000..c2c30879 --- /dev/null +++ b/integration-testing/testdata/basic/basic.int.v3.yaml @@ -0,0 +1,147 @@ +client: + version: v3 + address: "graphd1:9669,graphd2:9669" + user: root + password: nebula + concurrencyPerAddress: 1 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: basic_int_tests + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + DROP SPACE IF EXISTS basic_int_tests; + CREATE SPACE IF NOT EXISTS basic_int_tests(partition_num=5, replica_factor=1, vid_type=int); + USE basic_int_tests; + CREATE TAG Person(firstName STRING, lastName STRING, gender STRING, birthday DATE, creationDate DATETIME, locationIP STRING, browserUsed STRING); + CREATE EDGE KNOWS(creationDate DATETIME); + wait: 15s + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + +sources: + - path: ./person.csv + csv: + delimiter: "|" + tags: + - name: Person + id: + type: "INT" + index: 0 + ignoreExistedIndex: true + props: + - name: "firstName" + type: "STRING" + index: 1 + - name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + nullable: true + defaultValue: male + - name: "birthday" + type: "DATE" + index: 4 + nullable: true + nullValue: _NULL_ + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + nullable: true + alternativeIndices: + - 6 + - name: Person + id: + type: "INT" + index: 0 + function: hash + ignoreExistedIndex: true + props: + - name: "firstName" + type: "STRING" + index: 1 + - name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + nullable: true + defaultValue: male + - name: "birthday" + type: "DATE" + index: 4 + nullable: true + nullValue: _NULL_ + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + nullable: true + alternativeIndices: + - 6 + - path: ./knows.csv + edges: + - name: KNOWS # person_knows_person + src: + id: + type: "INT" + index: 0 + dst: + id: + type: "INT" + index: 1 + ignoreExistedIndex: true + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + nullable: true + nullValue: _NULL_ + defaultValue: 0000-00-00T00:00:00 + - name: KNOWS # person_knows_person + src: + id: + type: "INT" + index: 0 + function: hash + dst: + id: + type: "INT" + index: 1 + function: hash + rank: + index: 0 + ignoreExistedIndex: false + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + nullable: true + nullValue: _NULL_ + defaultValue: 0000-00-00T00:00:00 diff --git a/integration-testing/testdata/basic/basic.string.v3.yaml b/integration-testing/testdata/basic/basic.string.v3.yaml new file mode 100644 index 00000000..431e3d0e --- /dev/null +++ b/integration-testing/testdata/basic/basic.string.v3.yaml @@ -0,0 +1,96 @@ 
+client: + version: v3 + address: "graphd1:9669,graphd2:9669" + user: root + password: nebula + concurrencyPerAddress: 10 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: basic_string_tests + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + DROP SPACE IF EXISTS basic_string_tests; + CREATE SPACE IF NOT EXISTS basic_string_tests(partition_num=5, replica_factor=1, vid_type=FIXED_STRING(64)); + USE basic_string_tests; + CREATE TAG Person(firstName STRING, lastName STRING, gender STRING, birthday DATE, creationDate DATETIME, locationIP STRING, browserUsed STRING); + CREATE EDGE KNOWS(creationDate DATETIME); + wait: 15s + +log: + level: INFO + console: true + files: + - logs/nebula-importer.log + +sources: + - path: ./person.csv + csv: + delimiter: "|" + tags: + - name: Person + id: + type: "STRING" + concatItems: + - person_ + - 0 + - _id + props: + - name: "firstName" + type: "STRING" + index: 1 + - name: "lastName" + type: "STRING" + index: 2 + - name: "gender" + type: "STRING" + index: 3 + nullable: true + defaultValue: female + - name: "birthday" + type: "DATE" + index: 4 + nullable: true + nullValue: _NULL_ + - name: "creationDate" + type: "DATETIME" + index: 5 + - name: "locationIP" + type: "STRING" + index: 6 + - name: "browserUsed" + type: "STRING" + index: 7 + - path: ./knows.csv + batch: 256 + edges: + - name: KNOWS # person_knows_person + src: + id: + type: "STRING" + concatItems: + - person_ + - 0 + - _id + dst: + id: + type: "STRING" + concatItems: + - person_ + - 1 + - _id + props: + - name: "creationDate" + type: "DATETIME" + index: 2 + nullable: true + nullValue: _NULL_ + defaultValue: 0000-00-00T00:00:00 diff --git a/integration-testing/testdata/basic/knows.csv b/integration-testing/testdata/basic/knows.csv new file mode 100644 index 00000000..704df2e8 --- /dev/null +++ b/integration-testing/testdata/basic/knows.csv @@ -0,0 +1,9 @@ +933,2199023256684,2010-04-22T12:30:57 +933,10995116278700,2010-11-15T07:23:49 +933,21990232556027,2011-12-15T02:34:43 +1129,21990232556027,2010-04-13T16:44:22 +1129,10995116278700,2011-01-02T22:39:28 +1129,4398046512167,2011-09-17T21:40:17 +1129,6597069767117,2012-01-19T19:21:54 +21990232556027,6597069767117,2012-04-15T04:24:28 +6597069767117,4398046512167,2012-05-29T18:00:10 diff --git a/integration-testing/testdata/basic/logs/.gitkeep b/integration-testing/testdata/basic/logs/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/integration-testing/testdata/basic/person.csv b/integration-testing/testdata/basic/person.csv new file mode 100644 index 00000000..cf5aa9c7 --- /dev/null +++ b/integration-testing/testdata/basic/person.csv @@ -0,0 +1,13 @@ +933|Mahinda|Perera|male|1989-12-03|2010-02-14T15:32:10|119.235.7.103|Firefox +1129|Carmen|Lepland|female|1984-02-18|2010-01-28T06:39:58|195.20.151.175|Internet Explorer +2199023256684|A.|Rao|female|1985-08-02|2010-04-23T22:52:26|49.202.188.25|Firefox +4398046512167|Gustavo|Arbelaez|male|1986-11-02|2010-06-16T20:53:47|190.96.189.165|Chrome +6597069767117|Eli|Peretz|female|1989-01-18|2010-07-23T12:55:52|41.92.31.35|Internet Explorer +10995116278700|Joseph|Anderson|female|1986-01-07|2010-11-06T05:06:52|24.49.203.161|Firefox +17592186045684|Michael|Li|male|1983-04-20|2011-06-05T01:41:59|1.50.202.121|Firefox +21990232556027|Yacine|Abdelli|male|1988-07-26|2011-10-30T03:42:11|82.101.132.47|Firefox 
+21990232556585|Faisal|Malik|male|1981-02-02|2011-10-23T21:12:23|202.163.114.175|Chrome +24189255812290|Manuel|Alvarez||1985-12-27|2011-11-13T07:28:40|168.165.167.25|Firefox +26388279066636|Jose|Alonso|female|_NULL_|2012-02-15T13:41:01|196.1.135.241|Internet Explorer +28587302322727|Steve|Moore|male|1981-08-28|2012-03-23T00:44:07|18.250.105.113| +30786325578904|Giuseppe|Donati||_NULL_|2012-05-24T12:36:37|46.254.44.230| diff --git a/integration-testing/testdata/cases-int/cases.int.v3.yaml b/integration-testing/testdata/cases-int/cases.int.v3.yaml new file mode 100644 index 00000000..5d2ab405 --- /dev/null +++ b/integration-testing/testdata/cases-int/cases.int.v3.yaml @@ -0,0 +1,65 @@ +client: + version: v3 + address: "graphd1:9669,graphd2:9669" + user: root + password: nebula + concurrencyPerAddress: 1 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: tests_cases_string + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 10s + hooks: + before: + - statements: + - | + UPDATE CONFIGS storage:wal_ttl=3600; + UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = true }; + DROP SPACE IF EXISTS tests_cases_string; + CREATE SPACE IF NOT EXISTS tests_cases_string(partition_num=5, replica_factor=1, vid_type=int);USE tests_cases_string; + CREATE TAG course(name string, credits int); + wait: 15s + after: + - statements: + - | + UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = false }; + UPDATE CONFIGS storage:wal_ttl=86400; + +log: + level: INFO + console: true + +sources: + - path: ./course.csv + batch: 2 + csv: + withHeader: false + tags: + - name: course + id: + index: 0 + type: int + props: + - name: name + type: string + index: 1 + - name: credits + type: int + index: 2 + - name: course + id: + index: 0 + type: int + function: hash + props: + - name: name + type: string + index: 1 + - name: credits + type: int + index: 2 diff --git a/integration-testing/testdata/cases-int/course.csv b/integration-testing/testdata/cases-int/course.csv new file mode 100644 index 00000000..30c7fc32 --- /dev/null +++ b/integration-testing/testdata/cases-int/course.csv @@ -0,0 +1,7 @@ +0,Math,3,No5 +1,English,6,No11 +"2",Chinese,1,No1 +3,Test,2,No2 +4,Test2,4,No3 +"5",中国(  ),5,No10 +"6",中国( ),7,No10 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/basic_type_test.csv b/integration-testing/testdata/cases-string/basic_type_test.csv new file mode 100644 index 00000000..440984cb --- /dev/null +++ b/integration-testing/testdata/cases-string/basic_type_test.csv @@ -0,0 +1,12 @@ +b1,true,-1,-2.2,-3.0,str +b2,false,0,0,0.0,0 +b3,true,1,2.0,3.3,abc +b4,false,3,2.0,3.3,0a bd +b5,true,-3,2,3,abcd efg +bnull1,,,,, +bnull2,,,,,__NULL__ +bnull3,,-4,4,4.4, +bnull4,,-4,,4.4, +bnull5,,,4,, +bnull6,,,4,4.4, +bnull7,,,,4.4, \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/cases.string.v3.yaml b/integration-testing/testdata/cases-string/cases.string.v3.yaml new file mode 100644 index 00000000..ee721e18 --- /dev/null +++ b/integration-testing/testdata/cases-string/cases.string.v3.yaml @@ -0,0 +1,543 @@ +client: + version: v3 + address: "graphd1:9669,graphd2:9669" + user: root + password: nebula + concurrencyPerAddress: 1 + reconnectInitialInterval: 1s + retry: 3 + retryInitialInterval: 1s + +manager: + spaceName: tests_cases_string + batch: 128 + readerConcurrency: 50 + importerConcurrency: 512 + statsInterval: 15s + hooks: + before: + 
- statements: + - | + UPDATE CONFIGS storage:wal_ttl=3600; + UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = true }; + DROP SPACE IF EXISTS tests_cases_string; + CREATE SPACE IF NOT EXISTS tests_cases_string(partition_num=5, replica_factor=1, vid_type=FIXED_STRING(32));USE tests_cases_string; + CREATE TAG course(name string, credits int); + CREATE TAG building(name string); + CREATE TAG student(name string, age int, gender string); + CREATE EDGE follow(likeness double); + CREATE EDGE choose(grade int); + CREATE TAG course_no_props(); + CREATE TAG building_no_props(); + CREATE EDGE follow_no_props(); + CREATE TAG basic_type_test(b bool NULL, i int NULL, f float NULL, d double NULL, s string NULL); + CREATE EDGE edge_basic_type_test(b bool NULL, i int NULL, f float NULL, d double NULL, s string NULL); + CREATE TAG date_test(c1 date NULL, c2 time NULL, c3 datetime NULL, c4 timestamp NULL); + CREATE EDGE edge_date_test(c1 date NULL, c2 time NULL, c3 datetime NULL, c4 timestamp NULL); + CREATE TAG geography_test(any_shape geography NULL, only_point geography(point) NULL, only_linestring geography(linestring) NULL, only_polygon geography(polygon) NULL); + CREATE EDGE edge_geography_test(any_shape geography NULL, only_point geography(point) NULL, only_linestring geography(linestring) NULL, only_polygon geography(polygon) NULL) + wait: 10s + after: + - statements: + - | + UPDATE CONFIGS storage:rocksdb_column_family_options = { disable_auto_compactions = false }; + UPDATE CONFIGS storage:wal_ttl=86400; + +log: + level: INFO + console: true + +sources: + - path: ./choose.csv + batch: 2 + csv: + withHeader: false + edges: + - name: choose + src: + id: + index: 0 + dst: + id: + index: 1 + props: + - name: grade + type: INT + index: 2 + + - path: ./course.csv + batch: 2 + csv: + withHeader: false + tags: + - name: course + id: + index: 0 + props: + - name: name + type: string + index: 1 + - name: credits + type: int + index: 2 + - name: building + id: + index: 0 + props: + - name: name + type: string + index: 1 + + - path: ./course-lazy-quotes.csv + batch: 2 + csv: + withHeader: false + lazyQuotes: true + tags: + - name: course + id: + index: 0 + props: + - name: name + type: string + index: 1 + - name: credits + type: int + index: 2 + - name: building + id: + index: 0 + props: + - name: name + type: string + index: 1 + + - path: ./course.csv + batch: 2 + csv: + withHeader: false + tags: + - name: course + id: + type: string + concatItems: # "c1{index0}c2{index1}2" + - "c1" + - 0 + - c2 + - 1 + - "2" + props: + - name: name + type: string + index: 1 + - name: credits + type: int + index: 2 + - name: building + id: + type: string + concatItems: # "c1{index0}c2{index1}3" + - "c1" + - 0 + - c2 + - 1 + - "3" + props: + - name: name + type: string + index: 1 + + - path: ./course-with-header.csv + batch: 2 + csv: + withHeader: true + tags: + - name: course + id: + type: string + index: 0 + props: + - name: name + type: string + index: 1 + - name: credits + type: int + index: 4 + - name: building + id: + type: string + index: 0 + props: + - name: name + type: string + index: 2 + + - path: ./follow.csv + batch: 2 + csv: + withHeader: false + edges: + - name: follow + src: + id: + index: 0 + dst: + id: + index: 1 + rank: + index: 2 + props: + - name: likeness + type: double + index: 3 + + - path: ./follow.csv + batch: 2 + csv: + withHeader: false + edges: + - name: follow + src: + id: + concatItems: + - student_ + - 0 + dst: + id: + concatItems: + - student_ + - 1 + props: + 
- name: likeness + type: double + index: 3 + + - path: ./follow.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + edges: + - name: follow_no_props + src: + id: + index: 0 + dst: + id: + index: 1 + + - path: ./glob-follow-*.csv + batch: 2 + csv: + withHeader: false + edges: + - name: follow + src: + id: + index: 0 + dst: + id: + index: 1 + rank: + index: 2 + props: + - name: likeness + type: double + index: 3 + + - path: ./follow-delimiter.csv + batch: 2 + csv: + withHeader: true + delimiter: "|" + edges: + - name: follow + src: + id: + concatItems: + - student_ + - 0 + dst: + id: + concatItems: + - student_ + - 2 + rank: + index: 3 + props: + - name: likeness + type: double + index: 1 + + - path: ./student.csv + batch: 2 + csv: + withHeader: false + tags: + - name: student + id: + concatItems: + - student_ + - 0 + props: + - name: name + type: string + index: 1 + - name: age + type: int + index: 2 + - name: gender + type: string + index: 3 + - name: student + id: + concatItems: + - student_ + - 0 + props: + - name: age + type: int + index: 2 + - name: name + type: string + index: 1 + - name: gender + type: string + index: 3 + + - path: ./course.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: course_no_props + id: + index: 0 + + - path: ./course.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: course_no_props + id: + index: 0 + - name: building_no_props + id: + index: 0 + + - path: ./course.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: course_no_props + id: + index: 0 + - name: building + id: + index: 0 + props: + - name: name + type: string + index: 3 + + - path: ./course.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: building + id: + concatItems: + - wxyz_ + - 0 + props: + - name: name + type: string + index: 3 + - name: course_no_props + id: + index: 0 + + - path: ./basic_type_test.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: basic_type_test + id: + index: 0 + props: + - name: b + type: bool + index: 1 + nullable: true + - name: i + type: int + index: 2 + nullable: true + defaultValue: "0" + - name: f + type: float + index: 3 + nullable: true + alternativeIndices: + - 2 + - name: d + type: double + index: 4 + nullable: true + alternativeIndices: + - 3 + - 2 + defaultValue: "0" + - name: s + type: string + index: 5 + nullable: true + edges: + - name: edge_basic_type_test + src: + id: + index: 0 + dst: + id: + index: 0 + props: + - name: b + type: bool + index: 1 + nullable: true + - name: i + type: int + index: 2 + nullable: true + - name: f + type: float + index: 3 + nullable: true + alternativeIndices: + - 2 + defaultValue: "0" + - name: d + type: double + index: 4 + nullable: true + alternativeIndices: + - 3 + - 2 + - name: s + type: string + index: 5 + nullable: true + nullValue: "__NULL__" + + - path: ./date_test.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: date_test + id: + index: 0 + props: + - name: c1 + type: date + index: 1 + nullable: true + - name: c2 + type: time + index: 2 + nullable: true + - name: c3 + type: datetime + index: 3 + nullable: true + - name: c4 + type: timestamp + index: 4 + nullable: true + edges: + - name: edge_date_test + src: + id: + index: 0 + dst: + id: + index: 0 + props: + - name: c1 + type: date + index: 1 + nullable: true + - name: c2 + type: time + index: 2 + nullable: true + - name: c3 + type: datetime + index: 3 + nullable: true + - name: c4 + type: 
timestamp + index: 4 + nullable: true + + - path: ./geography_test.csv + batch: 2 + csv: + withHeader: false + delimiter: "," + tags: + - name: geography_test + id: + index: 0 + props: + - name: any_shape + type: geography + index: 1 + nullable: true + - name: only_point + type: geography(point) + index: 2 + nullable: true + - name: only_linestring + type: geography(linestring) + index: 3 + nullable: true + - name: only_polygon + type: geography(polygon) + index: 4 + nullable: true + edges: + - name: edge_geography_test + src: + id: + index: 0 + dst: + id: + index: 0 + props: + - name: any_shape + type: geography + index: 1 + nullable: true + - name: only_point + type: geography(point) + index: 2 + nullable: true + - name: only_linestring + type: geography(linestring) + index: 3 + nullable: true + - name: only_polygon + type: geography(polygon) + index: 4 + nullable: true \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/choose.csv b/integration-testing/testdata/cases-string/choose.csv new file mode 100644 index 00000000..07535ad1 --- /dev/null +++ b/integration-testing/testdata/cases-string/choose.csv @@ -0,0 +1,4 @@ +x200,x101,5 +x200,y102,3 +y201,y102,3 +z202,y102,3 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/course-lazy-quotes.csv b/integration-testing/testdata/cases-string/course-lazy-quotes.csv new file mode 100644 index 00000000..7ab5436b --- /dev/null +++ b/integration-testing/testdata/cases-string/course-lazy-quotes.csv @@ -0,0 +1,7 @@ +00testLazyQuotes0,a "word",4,No1 +00testLazyQuotes1,a"1"2",4,No1 +00testLazyQuotes2,a",4,No1 +00testLazyQuotes3,a"b,4,No1 +00testLazyQuotes4,a"b,4,No1 +00testLazyQuotes5,a""b,4,No1 +00testLazyQuotes6,"a"b",4,No1 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/course-with-header.csv b/integration-testing/testdata/cases-string/course-with-header.csv new file mode 100644 index 00000000..bdaa0efb --- /dev/null +++ b/integration-testing/testdata/cases-string/course-with-header.csv @@ -0,0 +1,5 @@ +id,course-name,building-name,-,course-credits +English,English,"No11 +B\",2,6 +Math,Math,No5,1,3 +Math,Math,No5,1,3 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/course.csv b/integration-testing/testdata/cases-string/course.csv new file mode 100644 index 00000000..d5b57afc --- /dev/null +++ b/integration-testing/testdata/cases-string/course.csv @@ -0,0 +1,7 @@ +x101,Math,3,No5 +y102,English,6,No11 +"z103",Chinese,1,No1 +0test,Test,2,No2 +00test,Test2,4,No3 +"000test",中国(  ),5,No10 +"0000test",中国( ),7,No10 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/date_test.csv b/integration-testing/testdata/cases-string/date_test.csv new file mode 100644 index 00000000..34980a7b --- /dev/null +++ b/integration-testing/testdata/cases-string/date_test.csv @@ -0,0 +1,3 @@ +d1,2020-01-01,18:28:23.284,2020-01-01T18:28:23.284,2020-01-01T18:28:23 +d2,2020-01-02,18:38:23.284,2020-01-11T19:28:23.284,1578770903 +dnull,,,, \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/follow-delimiter.csv b/integration-testing/testdata/cases-string/follow-delimiter.csv new file mode 100644 index 00000000..5a1b038f --- /dev/null +++ b/integration-testing/testdata/cases-string/follow-delimiter.csv @@ -0,0 +1,3 @@ +x201|92.5|y200|0 +y200|85.6|x201|1 +z202|93.2|x201|2 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/follow.csv 
b/integration-testing/testdata/cases-string/follow.csv new file mode 100644 index 00000000..47208e0b --- /dev/null +++ b/integration-testing/testdata/cases-string/follow.csv @@ -0,0 +1,4 @@ +x200,y201,0,92.5 +y201,x200,1,85.6 +y201,z202,2,93.2 +y201,z202,1,96.2 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/geography_test.csv b/integration-testing/testdata/cases-string/geography_test.csv new file mode 100644 index 00000000..a5e49192 --- /dev/null +++ b/integration-testing/testdata/cases-string/geography_test.csv @@ -0,0 +1,4 @@ +g1,POINT (-82.3764154 42.6452196),"Point(3 8)","LINEstring (-108.7 35.0,-100.0 46.5,-90.7 34.9,-108.7 35.0)","POlygon ( (-100.1 41.4,-102.9 37.6,-96.8 37.5,-100.1 41.4))" +g2,"LineString(0 1, 1 2, 2 3)","point(4.6 5.7 )","LINESTRING(43.8 52.6, -78.99 84.323)","POLYGON ((-108.7 35.0,-100.0 46.5,-90.7 34.9,-108.7 35.0))" +g3,"Polygon((-85.1 34.8,-80.7 28.4,-76.9 34.9,-85.1 34.8))", Point(0.0 0.0),"linestring(0 1, 179.99 89.99)","polygon((0 1, 2 4, 3 5, 4 9, 0 1))" +gnull,,,, \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/glob-follow-1.csv b/integration-testing/testdata/cases-string/glob-follow-1.csv new file mode 100644 index 00000000..95723cc2 --- /dev/null +++ b/integration-testing/testdata/cases-string/glob-follow-1.csv @@ -0,0 +1,3 @@ +x200,y201,0,92.5 +y201,x200,1,85.6 +y201,z202,2,93.2 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/glob-follow-2.csv b/integration-testing/testdata/cases-string/glob-follow-2.csv new file mode 100644 index 00000000..4ed5ab2f --- /dev/null +++ b/integration-testing/testdata/cases-string/glob-follow-2.csv @@ -0,0 +1 @@ +y201,z202,1,96.2 \ No newline at end of file diff --git a/integration-testing/testdata/cases-string/student.csv b/integration-testing/testdata/cases-string/student.csv new file mode 100644 index 00000000..5c5e5111 --- /dev/null +++ b/integration-testing/testdata/cases-string/student.csv @@ -0,0 +1,3 @@ +x200,Monica,16,female +y201,Mike,18,male +z202,Jane,17,female \ No newline at end of file diff --git a/pkg/cmd/nebula-importer.go b/pkg/cmd/nebula-importer.go index b5dfd94e..f4d38cae 100644 --- a/pkg/cmd/nebula-importer.go +++ b/pkg/cmd/nebula-importer.go @@ -1,6 +1,7 @@ package cmd import ( + "fmt" "os" "github.com/vesoft-inc/nebula-importer/v4/pkg/client" @@ -9,6 +10,7 @@ import ( "github.com/vesoft-inc/nebula-importer/v4/pkg/errors" "github.com/vesoft-inc/nebula-importer/v4/pkg/logger" "github.com/vesoft-inc/nebula-importer/v4/pkg/manager" + "github.com/vesoft-inc/nebula-importer/v4/pkg/version" "github.com/spf13/cobra" ) @@ -44,7 +46,7 @@ func NewDefaultImporterCommand() *cobra.Command { func NewImporterCommand(o *ImporterOptions) *cobra.Command { cmd := &cobra.Command{ Use: "nebula-importer", - Short: `The Nebula Importer Tool.`, + Short: `The NebulaGraph Importer Tool.`, RunE: func(cmd *cobra.Command, args []string) (err error) { defer func() { if err != nil { @@ -76,7 +78,13 @@ func NewImporterCommand(o *ImporterOptions) *cobra.Command { } return o.Run(cmd, args) }, + Version: version.GetVersion().String(), + SilenceErrors: true, + SilenceUsage: true, } + + cmd.SetVersionTemplate("{{.Version}}") + o.AddFlags(cmd) return cmd } @@ -115,6 +123,9 @@ func (o *ImporterOptions) Run(_ *cobra.Command, _ []string) error { if err := o.mgr.Wait(); err != nil { return err } + if o.mgr.Stats().IsFailed() { + return fmt.Errorf("failed to import") + } return nil } diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go 
index 7539faa4..e42e4b87 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -11,8 +11,8 @@ type ( Info(msg string, fields ...Field) Warn(msg string, fields ...Field) Error(msg string, fields ...Field) - Fatal(msg string, fields ...Field) Panic(msg string, fields ...Field) + Fatal(msg string, fields ...Field) Sync() error Close() error diff --git a/pkg/logger/nop.go b/pkg/logger/nop.go index bde62d2d..ab33a840 100644 --- a/pkg/logger/nop.go +++ b/pkg/logger/nop.go @@ -13,8 +13,8 @@ func (nopLogger) Debug(string, ...Field) {} func (nopLogger) Info(string, ...Field) {} func (nopLogger) Warn(string, ...Field) {} func (nopLogger) Error(string, ...Field) {} -func (nopLogger) Fatal(string, ...Field) {} func (nopLogger) Panic(string, ...Field) {} +func (nopLogger) Fatal(string, ...Field) {} func (nopLogger) Sync() error { return nil } func (nopLogger) Close() error { return nil } diff --git a/pkg/logger/zap.go b/pkg/logger/zap.go index 5478b119..be0516c0 100644 --- a/pkg/logger/zap.go +++ b/pkg/logger/zap.go @@ -90,14 +90,14 @@ func (l *zapLogger) Error(msg string, fields ...Field) { l.l.Error(msg, toZapFields(fields...)...) } -func (l *zapLogger) Fatal(msg string, fields ...Field) { - l.l.Fatal(msg, toZapFields(fields...)...) -} - func (l *zapLogger) Panic(msg string, fields ...Field) { l.l.Panic(msg, toZapFields(fields...)...) } +func (l *zapLogger) Fatal(msg string, fields ...Field) { + l.l.Fatal(msg, toZapFields(fields...)...) +} + func (l *zapLogger) Sync() error { return l.l.Sync() } diff --git a/pkg/source/oss.go b/pkg/source/oss.go index 8d177cab..10fc026c 100644 --- a/pkg/source/oss.go +++ b/pkg/source/oss.go @@ -13,11 +13,11 @@ var _ Source = (*ossSource)(nil) type ( OSSConfig struct { - Endpoint string `yaml:"endpoint,omitempty"` - AccessKey string `yaml:"accessKey,omitempty"` - SecretKey string `yaml:"secretKey,omitempty"` - Bucket string `yaml:"bucket,omitempty"` - Key string `yaml:"key,omitempty"` + Endpoint string `yaml:"endpoint,omitempty"` + AccessKeyID string `yaml:"accessKeyID,omitempty"` + AccessKeySecret string `yaml:"accessKeySecret,omitempty"` + Bucket string `yaml:"bucket,omitempty"` + Key string `yaml:"key,omitempty"` } ossSource struct { @@ -39,7 +39,7 @@ func (s *ossSource) Name() string { } func (s *ossSource) Open() error { - cli, err := oss.New(s.c.OSS.Endpoint, s.c.OSS.AccessKey, s.c.OSS.SecretKey) + cli, err := oss.New(s.c.OSS.Endpoint, s.c.OSS.AccessKeyID, s.c.OSS.AccessKeySecret) if err != nil { return err } diff --git a/pkg/source/oss_test.go b/pkg/source/oss_test.go index fead3c57..c1dcc167 100644 --- a/pkg/source/oss_test.go +++ b/pkg/source/oss_test.go @@ -38,11 +38,11 @@ var _ = Describe("ossSource", func() { c := Config{ OSS: &OSSConfig{ - Endpoint: httpServer.URL, - AccessKey: "accessKey", - SecretKey: "secretKey", - Bucket: "bucket", - Key: "key", + Endpoint: httpServer.URL, + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Bucket: "bucket", + Key: "key", }, } @@ -80,11 +80,11 @@ var _ = Describe("ossSource", func() { It("oss.New failed", func() { c := Config{ OSS: &OSSConfig{ - Endpoint: "\t", - AccessKey: "accessKey", - SecretKey: "secretKey", - Bucket: "bucket", - Key: "key", + Endpoint: "\t", + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Bucket: "bucket", + Key: "key", }, } @@ -99,11 +99,11 @@ var _ = Describe("ossSource", func() { It("Bucket failed", func() { c := Config{ OSS: &OSSConfig{ - Endpoint: httpServer.URL, - AccessKey: "accessKey", - SecretKey: "secretKey", - Bucket: "b", - Key: "key", 
+ Endpoint: httpServer.URL, + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Bucket: "b", + Key: "key", }, } @@ -121,11 +121,11 @@ var _ = Describe("ossSource", func() { }) c := Config{ OSS: &OSSConfig{ - Endpoint: httpServer.URL, - AccessKey: "accessKey", - SecretKey: "secretKey", - Bucket: "bucket", - Key: "key", + Endpoint: httpServer.URL, + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Bucket: "bucket", + Key: "key", }, } @@ -151,11 +151,11 @@ var _ = Describe("ossSource", func() { }) c := Config{ OSS: &OSSConfig{ - Endpoint: httpServer.URL, - AccessKey: "accessKey", - SecretKey: "secretKey", - Bucket: "bucket", - Key: "key", + Endpoint: httpServer.URL, + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Bucket: "bucket", + Key: "key", }, } @@ -185,11 +185,11 @@ var _ = Describe("ossSource", func() { }) c := Config{ OSS: &OSSConfig{ - Endpoint: httpServer.URL, - AccessKey: "accessKey", - SecretKey: "secretKey", - Bucket: "bucket", - Key: "key", + Endpoint: httpServer.URL, + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Bucket: "bucket", + Key: "key", }, } diff --git a/pkg/source/s3.go b/pkg/source/s3.go index a8abdabe..88205a1f 100644 --- a/pkg/source/s3.go +++ b/pkg/source/s3.go @@ -14,13 +14,13 @@ var _ Source = (*s3Source)(nil) type ( S3Config struct { - Endpoint string `yaml:"endpoint,omitempty"` - Region string `yaml:"region,omitempty"` - AccessKey string `yaml:"accessKey,omitempty"` - SecretKey string `yaml:"secretKey,omitempty"` - Token string `yaml:"token,omitempty"` - Bucket string `yaml:"bucket,omitempty"` - Key string `yaml:"key,omitempty"` + Endpoint string `yaml:"endpoint,omitempty"` + Region string `yaml:"region,omitempty"` + AccessKeyID string `yaml:"accessKeyID,omitempty"` + AccessKeySecret string `yaml:"accessKeySecret,omitempty"` + Token string `yaml:"token,omitempty"` + Bucket string `yaml:"bucket,omitempty"` + Key string `yaml:"key,omitempty"` } s3Source struct { @@ -46,8 +46,8 @@ func (s *s3Source) Open() error { S3ForcePathStyle: aws.Bool(true), } - if s.c.S3.AccessKey != "" || s.c.S3.SecretKey != "" || s.c.S3.Token != "" { - awsConfig.Credentials = credentials.NewStaticCredentials(s.c.S3.AccessKey, s.c.S3.SecretKey, s.c.S3.Token) + if s.c.S3.AccessKeyID != "" || s.c.S3.AccessKeySecret != "" || s.c.S3.Token != "" { + awsConfig.Credentials = credentials.NewStaticCredentials(s.c.S3.AccessKeyID, s.c.S3.AccessKeySecret, s.c.S3.Token) } sess, err := session.NewSession(awsConfig) diff --git a/pkg/source/s3_test.go b/pkg/source/s3_test.go index 7ccf49ca..76408084 100644 --- a/pkg/source/s3_test.go +++ b/pkg/source/s3_test.go @@ -38,13 +38,13 @@ var _ = Describe("s3Source", func() { c := Config{ S3: &S3Config{ - Endpoint: httpServer.URL, - Region: "us-west-2", - AccessKey: "accessKey", - SecretKey: "secretKey", - Token: "token", - Bucket: "bucket", - Key: "key", + Endpoint: httpServer.URL, + Region: "us-west-2", + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Token: "token", + Bucket: "bucket", + Key: "key", }, } @@ -85,13 +85,13 @@ var _ = Describe("s3Source", func() { }) c := Config{ S3: &S3Config{ - Endpoint: httpServer.URL, - Region: "us-west-2", - AccessKey: "accessKey", - SecretKey: "secretKey", - Token: "token", - Bucket: "bucket", - Key: "key", + Endpoint: httpServer.URL, + Region: "us-west-2", + AccessKeyID: "accessKeyID", + AccessKeySecret: "accessKeySecret", + Token: "token", + Bucket: "bucket", + Key: "key", }, } diff --git a/pkg/spec/v3/value.go 
b/pkg/spec/v3/value.go index f0b340e9..1df6fd52 100644 --- a/pkg/spec/v3/value.go +++ b/pkg/spec/v3/value.go @@ -13,6 +13,7 @@ const ( ValueTypeFloat ValueType = "FLOAT" ValueTypeDouble ValueType = "DOUBLE" ValueTypeDate ValueType = "DATE" + ValueTypeTime ValueType = "TIME" ValueTypeDateTime ValueType = "DATETIME" ValueTypeTimestamp ValueType = "TIMESTAMP" ValueTypeGeo ValueType = "GEOGRAPHY" @@ -31,6 +32,7 @@ var ( ValueTypeFloat: {}, ValueTypeDouble: {}, ValueTypeDate: {}, + ValueTypeTime: {}, ValueTypeDateTime: {}, ValueTypeTimestamp: {}, ValueTypeGeo: {}, diff --git a/pkg/spec/v3/value_test.go b/pkg/spec/v3/value_test.go index f9d11968..16dad606 100644 --- a/pkg/spec/v3/value_test.go +++ b/pkg/spec/v3/value_test.go @@ -17,6 +17,7 @@ var _ = Describe("Value", func() { Entry(nil, ValueTypeFloat, true), Entry(nil, ValueTypeDouble, true), Entry(nil, ValueTypeDate, true), + Entry(nil, ValueTypeTime, true), Entry(nil, ValueTypeDateTime, true), Entry(nil, ValueTypeTimestamp, true), Entry(nil, ValueTypeGeo, true), diff --git a/pkg/stats/stats.go b/pkg/stats/stats.go index 9c4a1913..6d981809 100644 --- a/pkg/stats/stats.go +++ b/pkg/stats/stats.go @@ -23,6 +23,10 @@ type ( } ) +func (s *Stats) IsFailed() bool { + return s.FailedRecords > 0 || s.FailedRequest > 0 || s.FailedProcessed > 0 +} + func (s *Stats) Percentage() float64 { if s.TotalBytes == 0 { return 0 diff --git a/pkg/stats/stats_test.go b/pkg/stats/stats_test.go index 947d27f6..8e4e61ce 100644 --- a/pkg/stats/stats_test.go +++ b/pkg/stats/stats_test.go @@ -29,6 +29,7 @@ var _ = Describe("Stats", func() { FailedProcessed: 2, TotalProcessed: 5, } + Expect(s.IsFailed()).To(Equal(true)) Expect(s.String()).Should(Equal("10s 20s 33.33%(100 KiB/300 KiB) Records{Finished: 1234, Failed: 23, Rate: 123.40/s}, Requests{Finished: 12, Failed: 1, Latency: 1s/2s, Rate: 1.20/s}, Processed{Finished: 5, Failed: 2, Rate: 0.50/s}")) }) })
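Note: together, the `Version` field on the cobra command and the new `Stats().IsFailed()` check make the CLI report its version and return a non-zero exit code when any records, requests, or processing steps failed. A small usage sketch (the config path is illustrative, taken from the integration-testing data):

```shell
# Print the importer version (cobra adds a --version flag once the Version field is set)
nebula-importer --version

# Run an import; the command now returns an error, and therefore a non-zero exit
# code, when Stats().IsFailed() reports failed records, requests, or processing
if ! nebula-importer --config ./integration-testing/testdata/basic/basic.int.v3.yaml; then
  echo "import finished with failures" >&2
fi
```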