diff --git a/.asf.yaml b/.asf.yaml
index 6c78530425..bb5d874a3f 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -34,14 +34,15 @@ github:
protected_branches:
master:
required_status_checks:
- # strict means "Require branches to be up-to-date before merging". (TODO: turnoff when branch is stable)
- strict: true
+ # strict means "Require PR to be up-to-date before merging". (enable when branch is unstable)
+ strict: false
# contexts are the names of checks that must pass (now only enable the basic check)
contexts:
- Analyze (java)
- CodeQL
- check-license
- - build (memory, 11)
+ - build-server (memory, 11)
+ - build-commons (11)
required_pull_request_reviews:
dismiss_stale_reviews: true
require_code_owner_reviews: false
diff --git a/.gitattributes b/.gitattributes
index ca5e57db70..85f64d198b 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -12,4 +12,5 @@ hugegraph-store/hg-store-dist/src/assembly/static/bin/libjemalloc_aarch64.so exp
.github/ export-ignore
.idea/ export-ignore
install-dist/scripts/ export-ignore
+hugegraph-commons/hugegraph-dist/ export-ignore
docker/ export-ignore
diff --git a/.github/workflows/check-dependencies.yml b/.github/workflows/check-dependencies.yml
index 6e3c572889..68f8c0e0c9 100644
--- a/.github/workflows/check-dependencies.yml
+++ b/.github/workflows/check-dependencies.yml
@@ -32,7 +32,7 @@ jobs:
- name: mvn install
run: |
- mvn install -DskipTests=true -ntp
+ mvn install -Dmaven.test.skip=true -ntp
- name: generate current dependencies
run: |
bash $SCRIPT_DEPENDENCY/regenerate_known_dependencies.sh current-dependencies.txt
diff --git a/.github/workflows/cluster-test-ci.yml b/.github/workflows/cluster-test-ci.yml
new file mode 100644
index 0000000000..7abebc7224
--- /dev/null
+++ b/.github/workflows/cluster-test-ci.yml
@@ -0,0 +1,52 @@
+name: "Cluster Test CI"
+
+on:
+ push:
+ branches:
+ - master
+ - 'release-*'
+ - 'test-*'
+ pull_request:
+
+jobs:
+ cluster-test:
+ runs-on: ubuntu-latest
+ env:
+ USE_STAGE: 'true' # Whether to include the stage repository.
+
+ steps:
+ - name: Install JDK 11
+ uses: actions/setup-java@v3
+ with:
+ java-version: '11'
+ distribution: 'zulu'
+
+ - name: Cache Maven packages
+ uses: actions/cache@v3
+ with:
+ path: ~/.m2
+ key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+ restore-keys: ${{ runner.os }}-m2
+
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 5
+
+ - name: use staged maven repo settings
+ if: ${{ env.USE_STAGE == 'true' }}
+ run: |
+ cp $HOME/.m2/settings.xml /tmp/settings.xml
+ mv -vf .github/configs/settings.xml $HOME/.m2/settings.xml
+
+ - name: Package
+ run: |
+ mvn clean package -U -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -ntp
+
+ - name: Run simple cluster test
+ run: |
+ mvn test -pl hugegraph-cluster-test/hugegraph-clustertest-test -am -P simple-cluster-test -DskipCommonsTests=true
+
+ - name: Run multi cluster test
+ run: |
+ mvn test -pl hugegraph-cluster-test/hugegraph-clustertest-test -am -P multi-cluster-test -DskipCommonsTests=true
diff --git a/.github/workflows/commons-ci.yml b/.github/workflows/commons-ci.yml
new file mode 100644
index 0000000000..7b781dd4a5
--- /dev/null
+++ b/.github/workflows/commons-ci.yml
@@ -0,0 +1,64 @@
+name: "HugeGraph-Commons CI"
+
+on:
+ workflow_dispatch:
+ push:
+ branches:
+ - master
+ - 'release-*'
+ - 'test-*'
+ pull_request:
+
+jobs:
+ build-commons:
+ runs-on: ubuntu-latest
+ env:
+ # TODO: reset use stage to false later
+ USE_STAGE: 'true' # Whether to include the stage repository.
+
+ strategy:
+ fail-fast: false
+ matrix:
+ JAVA_VERSION: ['11']
+
+ steps:
+ - name: Install JDK ${{ matrix.JAVA_VERSION }}
+ uses: actions/setup-java@v3
+ with:
+ java-version: ${{ matrix.JAVA_VERSION }}
+ distribution: 'zulu'
+
+ - name: Cache Maven packages
+ uses: actions/cache@v3
+ with:
+ path: ~/.m2
+ key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+ restore-keys: ${{ runner.os }}-m2
+
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 2
+
+ - name: Use staged maven repo settings
+ if: ${{ env.USE_STAGE == 'true' }}
+ run: |
+ cp $HOME/.m2/settings.xml /tmp/settings.xml
+ cp -vf .github/configs/settings.xml $HOME/.m2/settings.xml && cat $HOME/.m2/settings.xml
+
+ - name: Compile
+ run: |
+ mvn compile -Dmaven.javadoc.skip=true -ntp
+
+ - name: Run common test
+ run: |
+ mvn test -pl hugegraph-commons/hugegraph-common -Dtest=UnitTestSuite -DskipCommonsTests=false
+
+ - name: Run rpc test
+ run: |
+ mvn test -pl hugegraph-commons/hugegraph-rpc -Dtest=UnitTestSuite -DskipCommonsTests=false
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v3.0.0
+ with:
+ file: target/jacoco.xml
diff --git a/.github/workflows/pd-store-ci.yml b/.github/workflows/pd-store-ci.yml
index c0f4825251..44d4456920 100644
--- a/.github/workflows/pd-store-ci.yml
+++ b/.github/workflows/pd-store-ci.yml
@@ -1,4 +1,4 @@
-name: "Graph PD & Store & Hstore CI"
+name: "HugeGraph-PD & Store & Hstore CI"
on:
push:
@@ -14,7 +14,8 @@ jobs:
runs-on: ubuntu-latest
env:
# TODO: avoid duplicated env setup in pd & store
- USE_STAGE: 'false' # Whether to include the stage repository.
+ # TODO: reset use stage to false later
+ USE_STAGE: 'true' # Whether to include the stage repository.
# TODO: remove outdated env
TRAVIS_DIR: hugegraph-server/hugegraph-dist/src/assembly/travis
REPORT_DIR: target/site/jacoco
@@ -46,11 +47,11 @@ jobs:
- name: Run common test
run: |
- mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-common-test
+ mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-common-test -DskipCommonsTests=true
- name: Run core test
run: |
- mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-core-test
+ mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-core-test -DskipCommonsTests=true
# The above tests do not require starting a PD instance.
@@ -64,11 +65,11 @@ jobs:
- name: Run client test
run: |
- mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-client-test
+ mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-client-test -DskipCommonsTests=true
- name: Run rest test
run: |
- mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-rest-test
+ mvn test -pl hugegraph-pd/hg-pd-test -am -P pd-rest-test -DskipCommonsTests=true
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3.0.0
@@ -79,7 +80,7 @@ jobs:
# TODO: avoid duplicated env setup
runs-on: ubuntu-latest
env:
- USE_STAGE: 'false' # Whether to include the stage repository.
+ USE_STAGE: 'true' # Whether to include the stage repository.
# TODO: remove outdated env
TRAVIS_DIR: hugegraph-server/hugegraph-dist/src/assembly/travis
REPORT_DIR: target/site/jacoco
@@ -120,27 +121,27 @@ jobs:
- name: Run common test
run: |
- mvn test -pl hugegraph-store/hg-store-test -am -P store-common-test
+ mvn test -pl hugegraph-store/hg-store-test -am -P store-common-test -DskipCommonsTests=true
- name: Run client test
run: |
- mvn test -pl hugegraph-store/hg-store-test -am -P store-client-test
+ mvn test -pl hugegraph-store/hg-store-test -am -P store-client-test -DskipCommonsTests=true
- name: Run core test
run: |
- mvn test -pl hugegraph-store/hg-store-test -am -P store-core-test
+ mvn test -pl hugegraph-store/hg-store-test -am -P store-core-test -DskipCommonsTests=true
- name: Run rocksdb test
run: |
- mvn test -pl hugegraph-store/hg-store-test -am -P store-rocksdb-test
+ mvn test -pl hugegraph-store/hg-store-test -am -P store-rocksdb-test -DskipCommonsTests=true
- name: Run server test
run: |
- mvn test -pl hugegraph-store/hg-store-test -am -P store-server-test
+ mvn test -pl hugegraph-store/hg-store-test -am -P store-server-test -DskipCommonsTests=true
- name: Run raft-core test
run: |
- mvn test -pl hugegraph-store/hg-store-test -am -P store-raftcore-test
+ mvn test -pl hugegraph-store/hg-store-test -am -P store-raftcore-test -DskipCommonsTests=true
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3.0.0
diff --git a/.github/workflows/server-ci.yml b/.github/workflows/server-ci.yml
index bbf8a5eab6..d640124ff1 100644
--- a/.github/workflows/server-ci.yml
+++ b/.github/workflows/server-ci.yml
@@ -1,4 +1,4 @@
-name: "Graph Server CI"
+name: "HugeGraph-Server CI"
on:
push:
@@ -9,11 +9,12 @@ on:
pull_request:
jobs:
- build:
+ build-server:
# TODO: we need test & replace it to ubuntu-24.04 or ubuntu-latest
runs-on: ubuntu-20.04
env:
- USE_STAGE: 'false' # Whether to include the stage repository.
+ # TODO: reset use stage to false later
+ USE_STAGE: 'true' # Whether to include the stage repository.
TRAVIS_DIR: hugegraph-server/hugegraph-dist/src/assembly/travis
REPORT_DIR: target/site/jacoco
BACKEND: ${{ matrix.BACKEND }}
diff --git a/BUILDING.md b/BUILDING.md
index b7342e68d6..d4c807c748 100644
--- a/BUILDING.md
+++ b/BUILDING.md
@@ -6,7 +6,7 @@ Required:
* Java 11
* Maven 3.5+
-To build without executing tests: `mvn clean package -DskipTests`
+To build without executing tests: `mvn clean package -Dmaven.test.skip=true`
## Building in IDEA
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/pom.xml b/hugegraph-cluster-test/hugegraph-clustertest-dist/pom.xml
new file mode 100644
index 0000000000..20e3efc599
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/pom.xml
@@ -0,0 +1,78 @@
+
+
+
+
+ 4.0.0
+
+ org.apache.hugegraph
+ hugegraph-cluster-test
+ ${revision}
+ ../pom.xml
+
+
+ hugegraph-clustertest-dist
+
+
+ ${project.parent.basedir}
+ bash
+ ${project.basedir}/src/assembly
+ ${assembly.dir}/descriptor
+ ${assembly.dir}/static
+ hg-ct
+
+
+
+
+
+ maven-assembly-plugin
+ 2.4
+
+
+ assembly-hugegraph-ct
+ package
+
+ single
+
+
+ false
+ false
+ ${dist.dir}
+
+
+ ${assembly.descriptor.dir}/assembly.xml
+
+
+ ${final.name}
+
+
+
+
+
+
+
+
+
+ org.apache.hugegraph
+ hugegraph-clustertest-minicluster
+ ${revision}
+
+
+
+
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/descriptor/assembly.xml b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/descriptor/assembly.xml
new file mode 100644
index 0000000000..3db49f4266
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/descriptor/assembly.xml
@@ -0,0 +1,50 @@
+
+
+
+ distribution
+ false
+
+
+ dir
+
+
+
+
+ ${assembly.static.dir}
+ /
+
+ **/*
+
+
+
+
+
+
+
+
+ /lib
+ false
+ runtime
+ false
+
+ org.apache.hugegraph:${executable.jar.name}:jar:*
+
+
+
+
+
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/hugegraph.properties.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/hugegraph.properties.template
new file mode 100644
index 0000000000..8eaf0adffb
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/hugegraph.properties.template
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# gremlin entrance to create graph
+# auth config: org.apache.hugegraph.auth.HugeFactoryAuthProxy
+gremlin.graph=org.apache.hugegraph.HugeFactory
+
+# cache config
+#schema.cache_capacity=100000
+# vertex-cache default is 1000w, 10min expired
+vertex.cache_type=l2
+#vertex.cache_capacity=10000000
+#vertex.cache_expire=600
+# edge-cache default is 100w, 10min expired
+edge.cache_type=l2
+#edge.cache_capacity=1000000
+#edge.cache_expire=600
+
+
+# schema illegal name template
+#schema.illegal_name_regex=\s+|~.*
+
+#vertex.default_label=vertex
+
+backend=hstore
+serializer=binary
+
+store=hugegraph
+
+# pd config
+pd.peers=$PD_PEERS_LIST$
+
+# task config
+task.scheduler_type=local
+task.schedule_period=10
+task.retry=0
+task.wait_timeout=10
+
+# raft config
+raft.mode=false
+raft.path=./raft-log
+raft.safe_read=true
+raft.use_replicator_pipeline=true
+raft.election_timeout=10000
+raft.snapshot_interval=3600
+raft.backend_threads=48
+raft.read_index_threads=8
+raft.snapshot_threads=4
+raft.snapshot_parallel_compress=false
+raft.snapshot_compress_threads=4
+raft.snapshot_decompress_threads=4
+raft.read_strategy=ReadOnlyLeaseBased
+raft.queue_size=16384
+raft.queue_publish_timeout=60
+raft.apply_batch=1
+raft.rpc_threads=80
+raft.rpc_connect_timeout=5000
+raft.rpc_timeout=60
+raft.install_snapshot_rpc_timeout=36000
+
+# search config
+search.text_analyzer=jieba
+search.text_analyzer_mode=INDEX
+
+# rocksdb backend config
+#rocksdb.data_path=/path/to/disk
+#rocksdb.wal_path=/path/to/disk
+
+
+# cassandra backend config
+cassandra.host=localhost
+cassandra.port=9042
+cassandra.username=
+cassandra.password=
+#cassandra.connect_timeout=5
+#cassandra.read_timeout=20
+#cassandra.keyspace.strategy=SimpleStrategy
+#cassandra.keyspace.replication=3
+
+# hbase backend config
+#hbase.hosts=localhost
+#hbase.port=2181
+#hbase.znode_parent=/hbase
+#hbase.threads_max=64
+# IMPORTANT: recommend to modify the HBase partition number
+# by the actual/env data amount & RS amount before init store
+# It will influence the load speed a lot
+#hbase.enable_partition=true
+#hbase.vertex_partitions=10
+#hbase.edge_partitions=30
+
+# mysql backend config
+#jdbc.driver=com.mysql.jdbc.Driver
+#jdbc.url=jdbc:mysql://127.0.0.1:3306
+#jdbc.username=root
+#jdbc.password=
+#jdbc.reconnect_max_times=3
+#jdbc.reconnect_interval=3
+#jdbc.ssl_mode=false
+
+# postgresql & cockroachdb backend config
+#jdbc.driver=org.postgresql.Driver
+#jdbc.url=jdbc:postgresql://localhost:5432/
+#jdbc.username=postgres
+#jdbc.password=
+#jdbc.postgresql.connect_database=template1
+
+# palo backend config
+#palo.host=127.0.0.1
+#palo.poll_interval=10
+#palo.temp_dir=./palo-data
+#palo.file_limit_size=32
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/pd-application.yml.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/pd-application.yml.template
new file mode 100644
index 0000000000..87229aabcf
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/pd-application.yml.template
@@ -0,0 +1,80 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+spring:
+ application:
+ name: hugegraph-pd
+
+management:
+ metrics:
+ export:
+ prometheus:
+ enabled: true
+ endpoints:
+ web:
+ exposure:
+ include: "*"
+
+logging:
+ config: 'file:./conf/log4j2.xml'
+license:
+ verify-path: ./conf/verify-license.json
+ license-path: ./conf/hugegraph.license
+grpc:
+ port: $GRPC_PORT$
+ # The service address of grpc needs to be changed to the actual local IPv4 address when deploying.
+ host: 127.0.0.1
+
+server:
+ # REST service port number
+ port : $REST_PORT$
+
+pd:
+ # Storage path
+ data-path: ./pd_data
+ # The check cycle of automatic expansion regularly checks the number of partitions in each store and automatically balances the number of partitions
+ patrol-interval: 1800
+ # The minimum number of surviving store nodes, less than which the entire cluster is unavailable
+ initial-store-count: $STORE_COUNT$
+ # The initial store list, grpc IP: grpc port, the store in the list is automatically activated
+ initial-store-list: $STORE_GRPC_LIST$
+
+
+raft:
+ # The address of the local raft service
+ address: $RAFT_ADDRESS$
+ # The service address of the PD cluster
+ peers-list: $RAFT_PEERS_LIST$
+
+store:
+ # Max time (in seconds) a store may stay offline; after this it is considered permanently unavailable and its replicas are reassigned to other machines
+ max-down-time: 172800
+ # Specifies whether to enable store monitoring data storage
+ monitor_data_enabled: true
+ # The interval between monitoring data, minute, hour, second
+ # default: 1 min * 1 day = 1440
+ monitor_data_interval: 1 minute
+ # Retention time of monitoring data is 1 day; day, month, year
+ monitor_data_retention: 1 day
+ initial-store-count: 1
+
+partition:
+ # Default number of replicas per partition
+ default-shard-count: 1
+ # The default maximum number of replicas per machine
+ # the initial number of partitions = store-max-shard-count * store-number / default-shard-count
+ store-max-shard-count: 12
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/rest-server.properties.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/rest-server.properties.template
new file mode 100644
index 0000000000..8f4e9bf616
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/rest-server.properties.template
@@ -0,0 +1,71 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# bind url
+# could use '0.0.0.0' or specified (real)IP to expose external network access
+restserver.url=http://$REST_SERVER_ADDRESS$
+# gremlin server url, need to be consistent with host and port in gremlin-server.yaml
+#gremlinserver.url=http://$REST_SERVER_ADDRESS$
+
+graphs=./conf/graphs
+
+# The maximum thread ratio for batch writing, only take effect if the batch.max_write_threads is 0
+batch.max_write_ratio=80
+batch.max_write_threads=0
+
+# configuration of arthas
+arthas.telnet_port=8562
+arthas.http_port=8561
+arthas.ip=127.0.0.1
+arthas.disabled_commands=jad
+
+# authentication configs
+# choose 'org.apache.hugegraph.auth.StandardAuthenticator' or
+# 'org.apache.hugegraph.auth.ConfigAuthenticator'
+#auth.authenticator=
+
+# for StandardAuthenticator mode
+#auth.graph_store=hugegraph
+# auth client config
+#auth.remote_url=127.0.0.1:8899,127.0.0.1:8898,127.0.0.1:8897
+
+# for ConfigAuthenticator mode
+#auth.admin_token=
+#auth.user_tokens=[]
+
+# rpc server configs for multi graph-servers or raft-servers
+rpc.server_host=127.0.0.1
+rpc.server_port=$RPC_PORT$
+#rpc.server_timeout=30
+
+# rpc client configs (like enable to keep cache consistency)
+#rpc.remote_url=127.0.0.1:8091,127.0.0.1:8092,127.0.0.1:8093
+#rpc.client_connect_timeout=20
+#rpc.client_reconnect_period=10
+#rpc.client_read_timeout=40
+#rpc.client_retries=3
+#rpc.client_load_balancer=consistentHash
+
+# raft group initial peers
+#raft.group_peers=127.0.0.1:8091,127.0.0.1:8092,127.0.0.1:8093
+
+# lightweight load balancing (beta)
+server.id=$SERVER_ID$
+server.role=$ROLE$
+
+# slow query log
+log.slow_query_threshold=1000
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/store-application.yml.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/store-application.yml.template
new file mode 100644
index 0000000000..93ceb76386
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/store-application.yml.template
@@ -0,0 +1,64 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+pdserver:
+ # PD service address, multiple PD addresses separated by commas
+ address: $PD_SERVER_ADDRESS$
+
+management:
+ metrics:
+ export:
+ prometheus:
+ enabled: true
+ endpoints:
+ web:
+ exposure:
+ include: "*"
+
+grpc:
+ # grpc service address
+ host: 127.0.0.1
+ port: $GRPC_PORT$
+ netty-server:
+ max-inbound-message-size: 1000MB
+raft:
+ # raft cache queue size
+ disruptorBufferSize: 1024
+ address: $RAFT_ADDRESS$
+ max-log-file-size: 600000000000
+ # Snapshot generation interval, in seconds
+ snapshotInterval: 1800
+server:
+ # rest service address
+ port: $REST_PORT$
+
+app:
+ # Storage path, support multiple paths, separated by commas
+ data-path: ./storage
+ #raft-path: ./storage
+
+spring:
+ application:
+ name: store-node-grpc-server
+ profiles:
+ active: default
+ include: pd
+
+logging:
+ config: 'file:./conf/log4j2.xml'
+ level:
+ root: info
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/pom.xml b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/pom.xml
new file mode 100644
index 0000000000..8feb6181f2
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/pom.xml
@@ -0,0 +1,66 @@
+
+
+
+
+ 4.0.0
+ hugegraph-clustertest-minicluster
+
+
+ org.apache.hugegraph
+ hugegraph-cluster-test
+ ${revision}
+
+
+
+
+ 11
+ 11
+ UTF-8
+ 2.17.0
+
+
+
+
+ org.apache.commons
+ commons-lang3
+ 3.13.0
+ compile
+
+
+ commons-io
+ commons-io
+ 2.12.0
+ compile
+
+
+ org.slf4j
+ slf4j-api
+ 2.0.9
+ compile
+
+
+ org.projectlombok
+ lombok
+ 1.18.24
+ compile
+
+
+
+
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/ClusterConstant.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/ClusterConstant.java
new file mode 100644
index 0000000000..9120c0cf92
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/ClusterConstant.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+import java.io.File;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Objects;
+
+import org.apache.commons.lang3.SystemUtils;
+
+public class ClusterConstant {
+
+ public static final String LOG = "logs";
+ public static final String PROJECT_DIR = getProjectDir();
+ public static final String LIB_DIR = "lib";
+ public static final String EXT_DIR = "ext";
+ public static final String PLUGINS_DIR = "plugins";
+ public static final String BIN_DIR = "bin";
+ public static final String CONF_DIR = "conf";
+ public static final String PD_PACKAGE_PREFIX = "apache-hugegraph-pd-incubating";
+ public static final String PD_JAR_PREFIX = "hg-pd-service";
+ public static final String STORE_PACKAGE_PREFIX = "apache-hugegraph-store-incubating";
+ public static final String STORE_JAR_PREFIX = "hg-store-node";
+ public static final String SERVER_PACKAGE_PREFIX = "apache-hugegraph-server-incubating";
+ public static final String CT_PACKAGE_PREFIX = "apache-hugegraph-ct-incubating";
+ public static final String APPLICATION_FILE = "application.yml";
+ public static final String SERVER_PROPERTIES = "rest-server.properties";
+ public static final String HUGEGRAPH_PROPERTIES = "graphs/hugegraph.properties";
+ public static final String LOG4J_FILE = "log4j2.xml";
+ public static final String PD_TEMPLATE_FILE = "pd-application.yml.template";
+ public static final String STORE_TEMPLATE_FILE = "store-application.yml.template";
+ public static final String SERVER_TEMPLATE_FILE = "rest-server.properties.template";
+ public static final String GRAPH_TEMPLATE_FILE = "hugegraph.properties.template";
+ public static final String GREMLIN_DRIVER_SETTING_FILE = "gremlin-driver-settings.yaml";
+ public static final String GREMLIN_SERVER_FILE = "gremlin-server.yaml";
+ public static final String REMOTE_SETTING_FILE = "remote.yaml";
+ public static final String REMOTE_OBJECTS_SETTING_FILE = "remote-objects.yaml";
+ public static final String EMPTY_SAMPLE_GROOVY_FILE = "scripts/empty-sample.groovy";
+ public static final String EXAMPLE_GROOVY_FILE = "scripts/example.groovy";
+ public static final String LOCALHOST = "127.0.0.1";
+
+ public static final String JAVA_CMD =
+ System.getProperty("java.home") + File.separator + BIN_DIR + File.separator +
+ (SystemUtils.IS_OS_WINDOWS ? "java.exe" : "java");
+ public static final String PD_DIST_PATH =
+ PROJECT_DIR + File.separator + "hugegraph-pd" + File.separator;
+ public static final String PD_LIB_PATH =
+ getFileInDir(PD_DIST_PATH, PD_PACKAGE_PREFIX) + File.separator + LIB_DIR +
+ File.separator;
+ public static final String PD_TEMPLATE_PATH =
+ getFileInDir(PD_DIST_PATH, PD_PACKAGE_PREFIX) + File.separator + CONF_DIR +
+ File.separator;
+ public static final String STORE_DIST_PATH =
+ PROJECT_DIR + File.separator + "hugegraph-store" + File.separator;
+ public static final String STORE_LIB_PATH =
+ getFileInDir(STORE_DIST_PATH, STORE_PACKAGE_PREFIX) + File.separator + LIB_DIR +
+ File.separator;
+ public static final String STORE_TEMPLATE_PATH =
+ getFileInDir(STORE_DIST_PATH, STORE_PACKAGE_PREFIX) + File.separator + CONF_DIR +
+ File.separator;
+ public static final String SERVER_DIST_PATH =
+ PROJECT_DIR + File.separator + "hugegraph-server" + File.separator;
+ public static final String SERVER_LIB_PATH =
+ getFileInDir(SERVER_DIST_PATH, SERVER_PACKAGE_PREFIX) +
+ File.separator;
+ public static final String SERVER_PACKAGE_PATH =
+ getFileInDir(SERVER_DIST_PATH, SERVER_PACKAGE_PREFIX) +
+ File.separator;
+ public static final String SERVER_TEMPLATE_PATH =
+ SERVER_PACKAGE_PATH + CONF_DIR + File.separator;
+ public static final String CT_DIST_PATH =
+ PROJECT_DIR + File.separator + "hugegraph-cluster-test" + File.separator;
+ public static final String CT_PACKAGE_PATH =
+ getFileInDir(CT_DIST_PATH, CT_PACKAGE_PREFIX) + File.separator;
+ public static final String CONFIG_FILE_PATH = CT_PACKAGE_PATH + CONF_DIR + File.separator;
+
+ private ClusterConstant() {
+ throw new IllegalStateException("Utility class");
+ }
+
+ public static String getFileInDir(String path, String fileName) {
+ File dir = new File(path);
+ if (dir.exists() && dir.isDirectory()) {
+ for (File file : Objects.requireNonNull(dir.listFiles())) {
+ if (file.getName().startsWith(fileName) && !file.getName().endsWith(".gz")) {
+ return path + file.getName();
+ }
+ }
+ }
+ return "";
+ }
+
+ public static boolean isJava11OrHigher() {
+ String version = System.getProperty("java.version");
+ if (version.startsWith("1.")) {
+ version = version.substring(2, 3);
+ } else {
+ int dot = version.indexOf(".");
+ if (dot != -1) {
+ version = version.substring(0, dot);
+ }
+ }
+ int versionNumber = Integer.parseInt(version);
+ return versionNumber >= 11;
+ }
+
+ public static String getProjectDir() {
+ String userDir = System.getProperty("user.dir"); // get current dir
+ Path path = Paths.get(userDir);
+
+ if (userDir.endsWith("hugegraph-cluster-test")) {
+ return path.getParent().toString();
+ } else if (userDir.endsWith("hugegraph-clustertest-test")) {
+ return path.getParent().getParent().toString();
+ }
+
+ return userDir; // Return current dir if not matched
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvType.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvType.java
new file mode 100644
index 0000000000..56449a42b0
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvType.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+public enum EnvType {
+
+ SingleNode,
+ MultiNode;
+
+ public static EnvType getSystemEnvType() {
+ String envType = System.getProperty("test_env", SingleNode.toString());
+ return EnvType.valueOf(envType);
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvUtil.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvUtil.java
new file mode 100644
index 0000000000..4d4bab3831
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvUtil.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.slf4j.Logger;
+
+public class EnvUtil {
+
+ private static final Logger LOG = HGTestLogger.UTIL_LOG;
+ private static final Set<Integer> ports = new HashSet<>();
+
+ public static int getAvailablePort() {
+ try {
+ int port = -1;
+ while (port < 0 || ports.contains(port)) {
+ ServerSocket socket = new ServerSocket(0);
+ port = socket.getLocalPort();
+ socket.close();
+ }
+ ports.add(port);
+ return port;
+ } catch (IOException e) {
+ LOG.error("Failed to get available ports", e);
+ return -1;
+ }
+ }
+
+ public static void copyFileToDestination(Path source, Path destination) {
+ try {
+ ensureParentDirectoryExists(destination);
+ Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);
+ } catch (IOException ioException) {
+ LOG.error("Failed to copy files to destination dir", ioException);
+ throw new RuntimeException(ioException);
+ }
+ }
+
+ private static void ensureParentDirectoryExists(Path destination) throws IOException {
+ Path parentDir = destination.getParent();
+ if (parentDir != null && Files.notExists(parentDir)) {
+ Files.createDirectories(parentDir);
+ }
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/HGTestLogger.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/HGTestLogger.java
new file mode 100644
index 0000000000..ceef1e40b3
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/HGTestLogger.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class HGTestLogger {
+
+ public static Logger UTIL_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+ public static Logger ENV_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+ public static Logger CONFIG_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+ public static Logger NODE_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java
new file mode 100644
index 0000000000..36a7240d2f
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public abstract class AbstractConfig {
+
+ protected static final Logger LOG = HGTestLogger.CONFIG_LOG;
+ protected String config;
+ protected Map<String, String> properties = new HashMap<>();
+ protected String fileName;
+
+ protected void readTemplate(Path filePath) {
+ try {
+ this.config = new String(Files.readAllBytes(filePath));
+ } catch (IOException e) {
+ LOG.error("failed to get file", e);
+ }
+ }
+
+ protected void updateConfigs() {
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+ String placeholder = "$" + entry.getKey() + "$";
+ this.config = this.config.replace(placeholder, entry.getValue());
+ }
+ }
+
+ public void writeConfig(String filePath) {
+ updateConfigs();
+ Path destPath = Paths.get(filePath + File.separator + this.fileName);
+ try {
+ if (Files.notExists(destPath.getParent())) {
+ Files.createDirectories(destPath.getParent());
+ }
+ } catch (IOException e) {
+ LOG.error("Failed to create dir", e);
+ }
+ try (FileWriter writer = new FileWriter(String.valueOf(destPath))) {
+ writer.write(this.config);
+ } catch (IOException e) {
+ LOG.error("Failed to write in file", e);
+ }
+ }
+
+ public String getProperty(String propertyName) {
+ return properties.get(propertyName);
+ }
+
+ protected void setProperty(String propertyName, String value) {
+ if (properties.containsKey(propertyName)) {
+ properties.replace(propertyName, value);
+ } else {
+ properties.put(propertyName, value);
+ }
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ClusterConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ClusterConfig.java
new file mode 100644
index 0000000000..c71e4b07e1
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ClusterConfig.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public class ClusterConfig {
+
+ protected static final Logger LOG = HGTestLogger.CONFIG_LOG;
+ protected List<PDConfig> pdConfigs;
+ protected List<StoreConfig> storeConfigs;
+ protected List<ServerConfig> serverConfigs;
+ protected List<GraphConfig> graphConfigs;
+
+ protected List<String> pdGrpcList, pdRaftList, storeGrpcList;
+
+ public ClusterConfig(int pdCnt, int storeCnt, int serverCnt) {
+ pdConfigs = new ArrayList<>();
+ storeConfigs = new ArrayList<>();
+ serverConfigs = new ArrayList<>();
+ graphConfigs = new ArrayList<>();
+ pdGrpcList = new ArrayList<>();
+ pdRaftList = new ArrayList<>();
+ storeGrpcList = new ArrayList<>();
+
+ for (int i = 0; i < pdCnt; i++) {
+ PDConfig pdConfig = new PDConfig();
+ pdConfig.setStoreCount(storeCnt);
+ pdConfigs.add(pdConfig);
+ pdGrpcList.add(pdConfig.getGrpcAddress());
+ pdRaftList.add(pdConfig.getRaftAddress());
+ }
+
+ for (int i = 0; i < storeCnt; i++) {
+ StoreConfig storeConfig = new StoreConfig();
+ storeConfig.setPDServerList(pdGrpcList);
+ storeConfigs.add(storeConfig);
+ storeGrpcList.add(storeConfig.getGrpcAddress());
+ }
+
+ for (int i = 0; i < serverCnt; i++) {
+ ServerConfig serverConfig = new ServerConfig();
+ serverConfigs.add(serverConfig);
+ GraphConfig graphConfig = new GraphConfig();
+ graphConfig.setPDPeersList(pdGrpcList);
+ graphConfigs.add(graphConfig);
+ }
+
+ for (int i = 0; i < pdCnt; i++) {
+ PDConfig pdConfig = pdConfigs.get(i);
+ pdConfig.setRaftPeerList(pdRaftList);
+ pdConfig.setStoreGrpcList(storeGrpcList);
+ }
+ }
+
+ public PDConfig getPDConfig(int i) {
+ return pdConfigs.get(i);
+ }
+
+ public StoreConfig getStoreConfig(int i) {
+ return storeConfigs.get(i);
+ }
+
+ public ServerConfig getServerConfig(int i) {
+ return serverConfigs.get(i);
+ }
+
+ public GraphConfig getGraphConfig(int i) {
+ return graphConfigs.get(i);
+ }
+
+ public List<String> getPDRestAddrs() {
+ List<String> addrs = new ArrayList<>();
+ for (PDConfig pdConfig : pdConfigs) {
+ addrs.add("127.0.0.1" + ":" + pdConfig.getRestPort());
+ }
+ return addrs;
+ }
+
+ public List<String> getPDGrpcAddrs() {
+ List<String> addrs = new ArrayList<>();
+ for (PDConfig pdConfig : pdConfigs) {
+ addrs.add(pdConfig.getGrpcAddress());
+ }
+ return addrs;
+ }
+
+ public List<String> getStoreRestAddrs() {
+ List<String> addrs = new ArrayList<>();
+ for (StoreConfig storeConfig : storeConfigs) {
+ addrs.add("127.0.0.1" + ":" + storeConfig.getRestPort());
+ }
+ return addrs;
+ }
+
+ public List<String> getStoreGrpcAddrs() {
+ List<String> addrs = new ArrayList<>();
+ for (StoreConfig storeConfig : storeConfigs) {
+ addrs.add("127.0.0.1" + ":" + storeConfig.getGrpcPort());
+ }
+ return addrs;
+ }
+
+ public List<String> getServerRestAddrs() {
+ List<String> addrs = new ArrayList<>();
+ for (ServerConfig serverConfig : serverConfigs) {
+ addrs.add("127.0.0.1" + ":" + serverConfig.getRestPort());
+ }
+ return addrs;
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java
new file mode 100644
index 0000000000..a6b425d51f
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GRAPH_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.HUGEGRAPH_PROPERTIES;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+public class GraphConfig extends AbstractConfig {
+
+ public GraphConfig() {
+ readTemplate(Paths.get(CONFIG_FILE_PATH + GRAPH_TEMPLATE_FILE));
+ this.fileName = HUGEGRAPH_PROPERTIES;
+ }
+
+ public void setPDPeersList(List<String> pdPeersList) {
+ String pdPeers = String.join(",", pdPeersList);
+ setProperty("PD_PEERS_LIST", pdPeers);
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java
new file mode 100644
index 0000000000..d53e45d575
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.APPLICATION_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOCALHOST;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+import lombok.Getter;
+
+@Getter
+public class PDConfig extends AbstractConfig {
+
+ private final int raftPort;
+ private final int grpcPort;
+ private final int restPort;
+
+ public PDConfig() {
+ readTemplate(Paths.get(CONFIG_FILE_PATH + PD_TEMPLATE_FILE));
+ this.fileName = APPLICATION_FILE;
+ this.raftPort = getAvailablePort();
+ this.grpcPort = getAvailablePort();
+ this.restPort = getAvailablePort();
+ properties.put("GRPC_PORT", String.valueOf(this.grpcPort));
+ properties.put("REST_PORT", String.valueOf(this.restPort));
+ properties.put("RAFT_ADDRESS", LOCALHOST + ":" + this.raftPort);
+ }
+
+ public void setRaftPeerList(List<String> raftPeerList) {
+ String raftPeers = String.join(",", raftPeerList);
+ setProperty("RAFT_PEERS_LIST", raftPeers);
+ }
+
+ public void setStoreCount(int storeCount) {
+ setProperty("STORE_COUNT", String.valueOf(storeCount));
+ }
+
+ public void setStoreGrpcList(List<String> storeGrpcList) {
+ String storeGrpcLists = String.join(",", storeGrpcList);
+ setProperty("STORE_GRPC_LIST", storeGrpcLists);
+ }
+
+ public String getRaftAddress() {
+ return LOCALHOST + ":" + this.raftPort;
+ }
+
+ public String getGrpcAddress() {
+ return LOCALHOST + ":" + this.grpcPort;
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ServerConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ServerConfig.java
new file mode 100644
index 0000000000..569a11dddf
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ServerConfig.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOCALHOST;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_PROPERTIES;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.nio.file.Paths;
+
+import lombok.Getter;
+
+@Getter
+public class ServerConfig extends AbstractConfig {
+
+ private final int rpcPort;
+ private final int restPort;
+
+ public ServerConfig() {
+ readTemplate(Paths.get(CONFIG_FILE_PATH + SERVER_TEMPLATE_FILE));
+ this.fileName = SERVER_PROPERTIES;
+ this.rpcPort = getAvailablePort();
+ this.restPort = getAvailablePort();
+ properties.put("REST_SERVER_ADDRESS", LOCALHOST + ":" + this.restPort);
+ properties.put("RPC_PORT", String.valueOf(this.rpcPort));
+ }
+
+ public void setServerID(String serverID) {
+ setProperty("SERVER_ID", serverID);
+ }
+
+ public void setRole(String role) {
+ setProperty("ROLE", role);
+ }
+}
+
+
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/StoreConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/StoreConfig.java
new file mode 100644
index 0000000000..50495f18a5
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/StoreConfig.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.APPLICATION_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOCALHOST;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+import lombok.Getter;
+
+@Getter
+public class StoreConfig extends AbstractConfig {
+
+ private final int raftPort;
+ private final int grpcPort;
+ private final int restPort;
+
+ public StoreConfig() {
+ readTemplate(Paths.get(CONFIG_FILE_PATH + STORE_TEMPLATE_FILE));
+ this.fileName = APPLICATION_FILE;
+ this.raftPort = getAvailablePort();
+ this.grpcPort = getAvailablePort();
+ this.restPort = getAvailablePort();
+ properties.put("GRPC_PORT", String.valueOf(this.grpcPort));
+ properties.put("REST_PORT", String.valueOf(this.restPort));
+ properties.put("RAFT_ADDRESS", LOCALHOST + ":" + this.raftPort);
+ }
+
+ public void setPDServerList(List<String> pdServerList) {
+ String pdServers = String.join(",", pdServerList);
+ setProperty("PD_SERVER_ADDRESS", pdServers);
+ }
+
+ public String getGrpcAddress() {
+ return LOCALHOST + ":" + this.grpcPort;
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java
new file mode 100644
index 0000000000..0c24860929
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.apache.hugegraph.ct.config.ClusterConfig;
+import org.apache.hugegraph.ct.config.GraphConfig;
+import org.apache.hugegraph.ct.config.PDConfig;
+import org.apache.hugegraph.ct.config.ServerConfig;
+import org.apache.hugegraph.ct.config.StoreConfig;
+import org.apache.hugegraph.ct.node.PDNodeWrapper;
+import org.apache.hugegraph.ct.node.ServerNodeWrapper;
+import org.apache.hugegraph.ct.node.StoreNodeWrapper;
+import org.slf4j.Logger;
+
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public abstract class AbstractEnv implements BaseEnv {
+
+ private static final Logger LOG = HGTestLogger.ENV_LOG;
+
+ protected ClusterConfig clusterConfig;
+ protected List<PDNodeWrapper> pdNodeWrappers;
+ protected List<ServerNodeWrapper> serverNodeWrappers;
+ protected List<StoreNodeWrapper> storeNodeWrappers;
+ @Setter
+ protected int cluster_id = 0;
+
+ protected AbstractEnv() {
+ this.pdNodeWrappers = new ArrayList<>();
+ this.serverNodeWrappers = new ArrayList<>();
+ this.storeNodeWrappers = new ArrayList<>();
+ }
+
+ protected void init(int pdCnt, int storeCnt, int serverCnt) {
+ this.clusterConfig = new ClusterConfig(pdCnt, storeCnt, serverCnt);
+ for (int i = 0; i < pdCnt; i++) {
+ PDNodeWrapper pdNodeWrapper = new PDNodeWrapper(cluster_id, i);
+ PDConfig pdConfig = clusterConfig.getPDConfig(i);
+ pdNodeWrappers.add(pdNodeWrapper);
+ pdConfig.writeConfig(pdNodeWrapper.getNodePath() + CONF_DIR);
+ }
+
+ for (int i = 0; i < storeCnt; i++) {
+ StoreNodeWrapper storeNodeWrapper = new StoreNodeWrapper(cluster_id, i);
+ StoreConfig storeConfig = clusterConfig.getStoreConfig(i);
+ storeNodeWrappers.add(storeNodeWrapper);
+ storeConfig.writeConfig(storeNodeWrapper.getNodePath() + CONF_DIR);
+ }
+
+ for (int i = 0; i < serverCnt; i++) {
+ ServerNodeWrapper serverNodeWrapper = new ServerNodeWrapper(cluster_id, i);
+ serverNodeWrappers.add(serverNodeWrapper);
+ ServerConfig serverConfig = clusterConfig.getServerConfig(i);
+ serverConfig.setServerID(serverNodeWrapper.getID());
+ GraphConfig graphConfig = clusterConfig.getGraphConfig(i);
+ if (i == 0) {
+ serverConfig.setRole("master");
+ } else {
+ serverConfig.setRole("worker");
+ }
+ serverConfig.writeConfig(serverNodeWrapper.getNodePath() + CONF_DIR);
+ graphConfig.writeConfig(serverNodeWrapper.getNodePath() + CONF_DIR);
+ }
+ }
+
+ public void startCluster() {
+ for (PDNodeWrapper pdNodeWrapper : pdNodeWrappers) {
+ pdNodeWrapper.start();
+ while (!pdNodeWrapper.isStarted()) {
+ try {
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ for (StoreNodeWrapper storeNodeWrapper : storeNodeWrappers) {
+ storeNodeWrapper.start();
+ while (!storeNodeWrapper.isStarted()) {
+ try {
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+ serverNodeWrapper.start();
+ while (!serverNodeWrapper.isStarted()) {
+ try {
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ }
+
+ public void stopCluster() {
+ for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+ serverNodeWrapper.stop();
+ }
+ for (StoreNodeWrapper storeNodeWrapper : storeNodeWrappers) {
+ storeNodeWrapper.stop();
+ }
+ for (PDNodeWrapper pdNodeWrapper : pdNodeWrappers) {
+ pdNodeWrapper.stop();
+ }
+ }
+
+ public ClusterConfig getConf() {
+ return this.clusterConfig;
+ }
+
+ public List<String> getPDRestAddrs() {
+ return clusterConfig.getPDRestAddrs();
+ }
+
+ public List<String> getPDGrpcAddrs() {
+ return clusterConfig.getPDGrpcAddrs();
+ }
+
+ public List<String> getStoreRestAddrs() {
+ return clusterConfig.getStoreRestAddrs();
+ }
+
+ public List<String> getStoreGrpcAddrs() {
+ return clusterConfig.getStoreGrpcAddrs();
+ }
+
+ public List<String> getServerRestAddrs() {
+ return clusterConfig.getServerRestAddrs();
+ }
+
+ public List<String> getPDNodeDir() {
+ List<String> nodeDirs = new ArrayList<>();
+ for (PDNodeWrapper pdNodeWrapper : pdNodeWrappers) {
+ nodeDirs.add(pdNodeWrapper.getNodePath());
+ }
+ return nodeDirs;
+ }
+
+ public List<String> getStoreNodeDir() {
+ List<String> nodeDirs = new ArrayList<>();
+ for (StoreNodeWrapper storeNodeWrapper : storeNodeWrappers) {
+ nodeDirs.add(storeNodeWrapper.getNodePath());
+ }
+ return nodeDirs;
+ }
+
+ public List<String> getServerNodeDir() {
+ List<String> nodeDirs = new ArrayList<>();
+ for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+ nodeDirs.add(serverNodeWrapper.getNodePath());
+ }
+ return nodeDirs;
+ }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/BaseEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/BaseEnv.java
new file mode 100644
index 0000000000..f6c4ba5fb6
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/BaseEnv.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import java.util.List;
+
+import org.apache.hugegraph.ct.config.ClusterConfig;
+
+public interface BaseEnv {
+
+ /* start all cluster nodes (PD, Store and Server) */
+ void startCluster();
+
+ /* stop all cluster nodes and clear their resources */
+ void stopCluster();
+
+ ClusterConfig getConf();
+
+ void init();
+
+ List<String> getPDRestAddrs();
+
+ List<String> getPDGrpcAddrs();
+
+ List<String> getStoreRestAddrs();
+
+ List<String> getServerRestAddrs();
+
+ List<String> getPDNodeDir();
+
+ List<String> getStoreNodeDir();
+
+ List<String> getServerNodeDir();
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/EnvFactory.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/EnvFactory.java
new file mode 100644
index 0000000000..a716697c5a
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/EnvFactory.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import org.apache.hugegraph.ct.base.EnvType;
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public class EnvFactory {
+
+ private static final Logger LOG = HGTestLogger.ENV_LOG;
+ private static BaseEnv env;
+
+ public static BaseEnv getEnv() {
+ if (env == null) {
+ EnvType envType = EnvType.getSystemEnvType();
+ switch (envType) {
+ case SingleNode:
+ env = new SimpleEnv();
+ break;
+ case MultiNode:
+ env = new MultiNodeEnv();
+ break;
+ default:
+ LOG.error("No such env type: {}", envType);
+ }
+ }
+ return env;
+ }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/MultiNodeEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/MultiNodeEnv.java
new file mode 100644
index 0000000000..83a540f26a
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/MultiNodeEnv.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+public class MultiNodeEnv extends AbstractEnv {
+
+ public MultiNodeEnv() {
+ super();
+ this.init();
+ }
+
+ public MultiNodeEnv(int pdNum, int storeNum, int serverNum) {
+ super();
+ super.init(pdNum, storeNum, serverNum);
+ }
+
+ @Override
+ public void init() {
+ super.init(3, 3, 3);
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/SimpleEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/SimpleEnv.java
new file mode 100644
index 0000000000..595ed0fbe1
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/SimpleEnv.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+public class SimpleEnv extends AbstractEnv {
+
+ public SimpleEnv() {
+ super();
+ init();
+ }
+
+ public void init() {
+ super.init(1, 1, 1);
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/AbstractNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/AbstractNodeWrapper.java
new file mode 100644
index 0000000000..8236bb1392
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/AbstractNodeWrapper.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CT_PACKAGE_PATH;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Scanner;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.file.PathUtils;
+import org.apache.hugegraph.ct.base.ClusterConstant;
+import org.apache.hugegraph.ct.base.EnvUtil;
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+import lombok.Getter;
+
+public abstract class AbstractNodeWrapper implements BaseNodeWrapper {
+
+ protected final Logger LOG = HGTestLogger.NODE_LOG;
+
+ protected int clusterIndex;
+ @Getter
+ protected String workPath;
+ @Getter
+ protected String configPath;
+ protected Process instance;
+ protected int index;
+ protected List fileNames;
+ protected String startLine;
+
+ public AbstractNodeWrapper() {
+ this.clusterIndex = 1;
+ fileNames = new ArrayList<>();
+ this.configPath = getNodePath();
+ }
+
+ public AbstractNodeWrapper(int clusterIndex, int index) {
+ this.clusterIndex = clusterIndex;
+ this.index = index;
+ fileNames = new ArrayList<>();
+ this.configPath = getNodePath();
+ }
+
+ /**
+ * The node directory must be created before the config files are modified.
+ */
+ public void createNodeDir(Path sourcePath, String destDir) {
+ try {
+ try {
+ if (!new File(destDir).exists()) {
+ FileUtils.createParentDirectories(new File(destDir));
+ }
+ } catch (NoSuchFileException fileException) {
+ // Ignored
+ }
+ // To avoid following symbolic links
+ try (Stream stream = Files.walk(sourcePath)) {
+ stream.forEach(source -> {
+ Path relativePath = sourcePath.relativize(source);
+ Path destination = Paths.get(destDir).resolve(relativePath);
+ if (fileNames.contains(relativePath.toString())) {
+ EnvUtil.copyFileToDestination(source, destination);
+ }
+ });
+ }
+ } catch (IOException ioException) {
+ LOG.error("Got error copying files to node destination dir", ioException);
+ throw new AssertionError();
+ }
+ }
+
+ public void createLogDir() {
+ String logPath = getLogPath();
+ try {
+ FileUtils.createParentDirectories(new File(logPath));
+ } catch (IOException e) {
+ LOG.error("Create log dir failed", e);
+ throw new AssertionError();
+ }
+ }
+
+ public void deleteDir() {
+ try {
+ PathUtils.deleteDirectory(Paths.get(getNodePath()));
+ } catch (IOException ex) {
+ try {
+ TimeUnit.SECONDS.sleep(1);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ LOG.error("Fail to delete node file", e);
+ throw new AssertionError("Delete node dir failed. " + e);
+ }
+ }
+ }
+
+ /**
+ * @return the node path: CT_PACKAGE_PATH + ID + separator (presumably under user.dir)
+ */
+ @Override
+ public String getNodePath() {
+ return CT_PACKAGE_PATH + getID() + File.separator;
+ }
+
+ @Override
+ public String getLogPath() {
+ return getNodePath() + ClusterConstant.LOG + File.separator + getID() + "-start.log";
+ }
+
+ @Override
+ public void updateWorkPath(String workPath) {
+ this.workPath = workPath;
+ }
+
+ @Override
+ public void updateConfigPath(String ConfigPath) {
+ this.configPath = ConfigPath;
+ }
+
+ @Override
+ public boolean isStarted() {
+ try (Scanner sc = new Scanner(new FileReader(getLogPath()))) {
+ while (sc.hasNextLine()) {
+ String line = sc.nextLine();
+ if (line.contains(startLine)) return true;
+ }
+ } catch (FileNotFoundException ignored) {
+ }
+ return false;
+ }
+
+ public void stop() {
+ if (this.instance == null) {
+ return;
+ }
+ this.instance.destroy();
+ try {
+ if (!this.instance.waitFor(20, TimeUnit.SECONDS)) {
+ this.instance.destroyForcibly().waitFor(10, TimeUnit.SECONDS);
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ LOG.error("Waiting node to shutdown error.", e);
+ }
+ deleteDir();
+ }
+
+ public boolean isAlive() {
+ return this.instance.isAlive();
+ }
+
+ protected ProcessBuilder runCmd(List startCmd, File stdoutFile) throws IOException {
+ FileUtils.write(stdoutFile,
+ String.join(" ", startCmd) + System.lineSeparator() + System.lineSeparator(),
+ StandardCharsets.UTF_8, true);
+ ProcessBuilder processBuilder = new ProcessBuilder(startCmd)
+ .redirectOutput(ProcessBuilder.Redirect.appendTo(stdoutFile))
+ .redirectError(ProcessBuilder.Redirect.appendTo(stdoutFile));
+ processBuilder.directory(new File(configPath));
+ return processBuilder;
+ }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/BaseNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/BaseNodeWrapper.java
new file mode 100644
index 0000000000..f428b227c4
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/BaseNodeWrapper.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+public interface BaseNodeWrapper {
+
+ void start();
+
+ void stop();
+
+ boolean isAlive();
+
+ String getID();
+
+ String getNodePath();
+
+ String getLogPath();
+
+ void updateWorkPath(String workPath);
+
+ void updateConfigPath(String ConfigPath);
+
+ boolean isStarted();
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/PDNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/PDNodeWrapper.java
new file mode 100644
index 0000000000..a89c614c4c
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/PDNodeWrapper.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.JAVA_CMD;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOG4J_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_JAR_PREFIX;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_LIB_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_TEMPLATE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.getFileInDir;
+import static org.apache.hugegraph.ct.base.ClusterConstant.isJava11OrHigher;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class PDNodeWrapper extends AbstractNodeWrapper {
+
+ public PDNodeWrapper() {
+ super();
+ fileNames = new ArrayList<>(Arrays.asList(LOG4J_FILE));
+ this.workPath = PD_LIB_PATH;
+ this.startLine = "Hugegraph-pd started.";
+ createNodeDir(Paths.get(PD_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+ createLogDir();
+ }
+
+ public PDNodeWrapper(int clusterIndex, int index) {
+ super(clusterIndex, index);
+ this.fileNames = new ArrayList<>(Arrays.asList(LOG4J_FILE));
+ this.workPath = PD_LIB_PATH;
+ this.startLine = "Hugegraph-pd started.";
+ createNodeDir(Paths.get(PD_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+ createLogDir();
+ }
+
+ /*
 workPath is the directory containing the JAR package; configPath is the directory of the config files
+ */
+ @Override
+ public void start() {
+ try {
+ File stdoutFile = new File(getLogPath());
+ List startCmd = new ArrayList<>();
+ startCmd.add(JAVA_CMD);
+ if (!isJava11OrHigher()) {
+ LOG.error("Please make sure that the JDK is installed and the version >= 11");
+ return;
+ }
+
+ String pdNodeJarPath = getFileInDir(workPath, PD_JAR_PREFIX);
+ startCmd.addAll(Arrays.asList(
+ "-Dname=HugeGraphPD" + this.index,
+ "-Xms512m",
+ "-Xmx4g",
+ "-XX:+HeapDumpOnOutOfMemoryError",
+ "-XX:HeapDumpPath=" + configPath + "logs",
+ "-Dlog4j.configurationFile=" + configPath + File.separator +
+ CONF_DIR + File.separator + "log4j2.xml",
+ "-Dspring.config.location=" + configPath + CONF_DIR + File.separator +
+ "application.yml",
+ "-jar", pdNodeJarPath));
+ ProcessBuilder processBuilder = runCmd(startCmd, stdoutFile);
+ this.instance = processBuilder.start();
+ } catch (IOException ex) {
+ throw new AssertionError("Start node failed. " + ex);
+ }
+ }
+
+ @Override
+ public String getID() {
+ return "PD" + this.index;
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/ServerNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/ServerNodeWrapper.java
new file mode 100644
index 0000000000..e39bc39557
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/ServerNodeWrapper.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.EMPTY_SAMPLE_GROOVY_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.EXAMPLE_GROOVY_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.EXT_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GREMLIN_DRIVER_SETTING_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GREMLIN_SERVER_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.JAVA_CMD;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LIB_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOG4J_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PLUGINS_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.REMOTE_OBJECTS_SETTING_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.REMOTE_SETTING_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_LIB_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_PACKAGE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_TEMPLATE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.isJava11OrHigher;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class ServerNodeWrapper extends AbstractNodeWrapper {
+
+ public ServerNodeWrapper(int clusterIndex, int index) {
+ super(clusterIndex, index);
+ this.fileNames = new ArrayList<>(
+ List.of(LOG4J_FILE, GREMLIN_SERVER_FILE, GREMLIN_DRIVER_SETTING_FILE,
+ REMOTE_SETTING_FILE, REMOTE_OBJECTS_SETTING_FILE));
+ this.workPath = SERVER_LIB_PATH;
+ createNodeDir(Paths.get(SERVER_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+ this.fileNames = new ArrayList<>(List.of(EMPTY_SAMPLE_GROOVY_FILE, EXAMPLE_GROOVY_FILE));
+ this.startLine = "INFO: [HttpServer] Started.";
+ createNodeDir(Paths.get(SERVER_PACKAGE_PATH), getNodePath());
+ createLogDir();
+ }
+
+ private static void addJarsToClasspath(File directory, List classpath) {
+ if (directory.exists() && directory.isDirectory()) {
+ File[] files = directory.listFiles((dir, name) -> name.endsWith(".jar"));
+ if (files != null) {
+ for (File file : files) {
+ classpath.add(file.getAbsolutePath());
+ }
+ }
+ }
+ }
+
+ @Override
+ public void start() {
+ try {
+ File stdoutFile = new File(getLogPath());
+ List startCmd = new ArrayList<>();
+ startCmd.add(JAVA_CMD);
+ if (!isJava11OrHigher()) {
+ LOG.error("Please make sure that the JDK is installed and the version >= 11");
+ return;
+ }
+
+ List classpath = new ArrayList<>();
+ addJarsToClasspath(new File(workPath + LIB_DIR), classpath);
+ addJarsToClasspath(new File(workPath + EXT_DIR), classpath);
+ addJarsToClasspath(new File(workPath + PLUGINS_DIR), classpath);
+ String storeClassPath = String.join(":", classpath);
+
+ startCmd.addAll(Arrays.asList(
+ "-Dname=HugeGraphServer" + this.index,
+ "--add-exports=java.base/jdk.internal.reflect=ALL-UNNAMED",
+ "-cp", storeClassPath,
+ "org.apache.hugegraph.dist.HugeGraphServer",
+ "./conf/gremlin-server.yaml",
+ "./conf/rest-server.properties"));
+ ProcessBuilder processBuilder = runCmd(startCmd, stdoutFile);
+ this.instance = processBuilder.start();
+ } catch (IOException ex) {
+ throw new AssertionError("Started server node failed. " + ex);
+ }
+ }
+
+ @Override
+ public String getID() {
+ return "Server" + this.index;
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/StoreNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/StoreNodeWrapper.java
new file mode 100644
index 0000000000..1cb0f67eae
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/StoreNodeWrapper.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.JAVA_CMD;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOG4J_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_JAR_PREFIX;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_LIB_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_TEMPLATE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.getFileInDir;
+import static org.apache.hugegraph.ct.base.ClusterConstant.isJava11OrHigher;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class StoreNodeWrapper extends AbstractNodeWrapper {
+
+ public StoreNodeWrapper() {
+ super();
+ this.fileNames = new ArrayList<>(List.of(LOG4J_FILE));
+ this.workPath = STORE_LIB_PATH;
+ this.startLine = "o.a.h.s.n.StoreNodeApplication - Starting StoreNodeApplication";
+ createNodeDir(Paths.get(STORE_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+ createLogDir();
+ }
+
+ public StoreNodeWrapper(int clusterId, int index) {
+ super(clusterId, index);
+ this.fileNames = new ArrayList<>(List.of(LOG4J_FILE));
+ this.workPath = STORE_LIB_PATH;
+ this.startLine = "o.a.h.s.n.StoreNodeApplication - Starting StoreNodeApplication";
+ createNodeDir(Paths.get(STORE_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+ createLogDir();
+ }
+
+ @Override
+ public void start() {
+ try {
+ File stdoutFile = new File(getLogPath());
+ List startCmd = new ArrayList<>();
+ startCmd.add(JAVA_CMD);
+ if (!isJava11OrHigher()) {
+ LOG.error("Please make sure that the JDK is installed and the version >= 11");
+ return;
+ }
+
+ String storeNodeJarPath = getFileInDir(workPath, STORE_JAR_PREFIX);
+ startCmd.addAll(Arrays.asList(
+ "-Dname=HugeGraphStore" + this.index,
+ "-Dlog4j.configurationFile=" + configPath + CONF_DIR
+ + File.separator + "log4j2.xml",
+ "-Dfastjson.parser.safeMode=true",
+ "-Xms512m",
+ "-Xmx2048m",
+ "-XX:MetaspaceSize=256M",
+ "-XX:+UseG1GC",
+ "-XX:+ParallelRefProcEnabled",
+ "-XX:+HeapDumpOnOutOfMemoryError",
+ "-XX:HeapDumpPath=" + configPath + "logs",
+ "-Dspring.config.location=" + configPath + CONF_DIR
+ + File.separator + "application.yml",
+ "-jar", storeNodeJarPath));
+ ProcessBuilder processBuilder = runCmd(startCmd, stdoutFile);
+ this.instance = processBuilder.start();
+ } catch (IOException ex) {
+ throw new AssertionError("Start node failed. " + ex);
+ }
+ }
+
+ @Override
+ public String getID() {
+ return "Store" + this.index;
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/pom.xml b/hugegraph-cluster-test/hugegraph-clustertest-test/pom.xml
new file mode 100644
index 0000000000..c888404545
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/pom.xml
@@ -0,0 +1,98 @@
+
+
+
+
+ 4.0.0
+
+ org.apache.hugegraph
+ hugegraph-cluster-test
+ ${revision}
+
+
+ hugegraph-clustertest-test
+
+
+ 11
+ 11
+ UTF-8
+
+
+
+ org.apache.hugegraph
+ hugegraph-clustertest-minicluster
+ ${revision}
+ compile
+
+
+
+ org.apache.hugegraph
+ hugegraph-client
+ ${toolchain.vision}
+
+
+ org.apache.hugegraph
+ hg-pd-client
+ ${revision}
+
+
+ junit
+ junit
+ 4.13.2
+ compile
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 2.20
+
+
+ simple-cluster-test
+
+ ${basedir}/src/main/java/
+
+ ${basedir}/target/classes/
+
+
+ **/SimpleClusterSuiteTest.java
+
+
+
+
+ multi-cluster-test
+
+ ${basedir}/src/main/java/
+
+ ${basedir}/target/classes/
+
+
+ **/MultiClusterSuiteTest.java
+
+
+
+
+
+
+
+
+
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java
new file mode 100644
index 0000000000..59394101c2
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.hugegraph.ct.env.BaseEnv;
+import org.apache.hugegraph.ct.env.MultiNodeEnv;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/**
+ * MultiNode Test generates the cluster env with 3 pd nodes + 3 store nodes + 3 server nodes.
+ * Or you can set a different number of nodes by using env = new MultiNodeEnv(pdNum, storeNum, serverNum).
+ * All nodes are deployed on randomly generated ports, and each node's application files are stored
+ * in /apache-hugegraph-ct-incubating-1.5.0; you can access each node via its REST API.
+ */
+public class BaseMultiClusterTest {
+
+ protected static BaseEnv env;
+ protected static Process p;
+
+ @BeforeClass
+ public static void initEnv() {
+ env = new MultiNodeEnv();
+ env.startCluster();
+ }
+
+ @AfterClass
+ public static void clearEnv() {
+ env.stopCluster();
+ }
+
+ protected String execCmd(String[] cmds) throws IOException {
+ ProcessBuilder process = new ProcessBuilder(cmds);
+ p = process.start();
+ BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
+ StringBuilder builder = new StringBuilder();
+ String line;
+ while ((line = reader.readLine()) != null) {
+ builder.append(line);
+ builder.append(System.lineSeparator());
+ }
+ p.destroy();
+ return builder.toString();
+ }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java
new file mode 100644
index 0000000000..0318df1ad0
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hugegraph.driver.GraphManager;
+import org.apache.hugegraph.driver.GremlinManager;
+import org.apache.hugegraph.driver.HugeClient;
+import org.apache.hugegraph.driver.SchemaManager;
+import org.apache.hugegraph.pd.client.PDClient;
+import org.apache.hugegraph.pd.client.PDConfig;
+import org.apache.hugegraph.pd.common.PDException;
+import org.apache.hugegraph.structure.constant.T;
+import org.apache.hugegraph.structure.graph.Edge;
+import org.apache.hugegraph.structure.graph.Path;
+import org.apache.hugegraph.structure.graph.Vertex;
+import org.apache.hugegraph.structure.gremlin.Result;
+import org.apache.hugegraph.structure.gremlin.ResultSet;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class MultiClusterDeployTest extends BaseMultiClusterTest {
+
+    @Test
+    public void testPDNodesDeployment() {
+        // Verify every PD node is reachable over gRPC by issuing a
+        // lightweight admin call against each advertised address.
+        List<String> addrs = env.getPDGrpcAddrs();
+        for (String addr : addrs) {
+            PDConfig pdConfig = PDConfig.of(addr);
+            PDClient pdClient = PDClient.create(pdConfig);
+            try {
+                pdClient.dbCompaction();
+            } catch (PDException e) {
+                // Assert.fail always reports, unlike `assert false`,
+                // which is a no-op unless the JVM runs with -ea.
+                Assert.fail("PD node " + addr + " is not reachable: " +
+                            e.getMessage());
+            }
+        }
+    }
+
+    @Test
+    public void testStoreNodesDeployment() throws IOException {
+        // Each store node must answer its REST endpoint with a JSON body.
+        List<String> addrs = env.getStoreRestAddrs();
+        for (String addr : addrs) {
+            String[] cmds = {"curl", addr};
+            String responseMsg = execCmd(cmds);
+            // A response starting with '{' indicates the REST service is
+            // up and returning JSON.
+            Assert.assertTrue("unexpected response from store node " + addr +
+                              ": " + responseMsg,
+                              responseMsg.startsWith("{"));
+        }
+    }
+
+ @Test
+ public void testServerNodesDeployment() {
+ List addrs = env.getServerRestAddrs();
+ for (String addr : addrs) {
+ HugeClient hugeClient = HugeClient.builder("http://" + addr, "hugegraph")
+ .build();
+ SchemaManager schema = hugeClient.schema();
+
+ schema.propertyKey("name").asText().ifNotExist().create();
+ schema.propertyKey("age").asInt().ifNotExist().create();
+ schema.propertyKey("city").asText().ifNotExist().create();
+ schema.propertyKey("weight").asDouble().ifNotExist().create();
+ schema.propertyKey("lang").asText().ifNotExist().create();
+ schema.propertyKey("date").asDate().ifNotExist().create();
+ schema.propertyKey("price").asInt().ifNotExist().create();
+
+ schema.vertexLabel("person")
+ .properties("name", "age", "city")
+ .primaryKeys("name")
+ .ifNotExist()
+ .create();
+
+ schema.vertexLabel("software")
+ .properties("name", "lang", "price")
+ .primaryKeys("name")
+ .ifNotExist()
+ .create();
+
+ schema.indexLabel("personByCity")
+ .onV("person")
+ .by("city")
+ .secondary()
+ .ifNotExist()
+ .create();
+
+ schema.indexLabel("personByAgeAndCity")
+ .onV("person")
+ .by("age", "city")
+ .secondary()
+ .ifNotExist()
+ .create();
+
+ schema.indexLabel("softwareByPrice")
+ .onV("software")
+ .by("price")
+ .range()
+ .ifNotExist()
+ .create();
+
+ schema.edgeLabel("knows")
+ .sourceLabel("person")
+ .targetLabel("person")
+ .properties("date", "weight")
+ .ifNotExist()
+ .create();
+
+ schema.edgeLabel("created")
+ .sourceLabel("person").targetLabel("software")
+ .properties("date", "weight")
+ .ifNotExist()
+ .create();
+
+ schema.indexLabel("createdByDate")
+ .onE("created")
+ .by("date")
+ .secondary()
+ .ifNotExist()
+ .create();
+
+ schema.indexLabel("createdByWeight")
+ .onE("created")
+ .by("weight")
+ .range()
+ .ifNotExist()
+ .create();
+
+ schema.indexLabel("knowsByWeight")
+ .onE("knows")
+ .by("weight")
+ .range()
+ .ifNotExist()
+ .create();
+
+ GraphManager graph = hugeClient.graph();
+ Vertex marko = graph.addVertex(T.LABEL, "person", "name", "marko",
+ "age", 29, "city", "Beijing");
+ Vertex vadas = graph.addVertex(T.LABEL, "person", "name", "vadas",
+ "age", 27, "city", "Hongkong");
+ Vertex lop = graph.addVertex(T.LABEL, "software", "name", "lop",
+ "lang", "java", "price", 328);
+ Vertex josh = graph.addVertex(T.LABEL, "person", "name", "josh",
+ "age", 32, "city", "Beijing");
+ Vertex ripple = graph.addVertex(T.LABEL, "software", "name", "ripple",
+ "lang", "java", "price", 199);
+ Vertex peter = graph.addVertex(T.LABEL, "person", "name", "peter",
+ "age", 35, "city", "Shanghai");
+
+ marko.addEdge("knows", vadas, "date", "2016-01-10", "weight", 0.5);
+ marko.addEdge("knows", josh, "date", "2013-02-20", "weight", 1.0);
+ marko.addEdge("created", lop, "date", "2017-12-10", "weight", 0.4);
+ josh.addEdge("created", lop, "date", "2009-11-11", "weight", 0.4);
+ josh.addEdge("created", ripple, "date", "2017-12-10", "weight", 1.0);
+ peter.addEdge("created", lop, "date", "2017-03-24", "weight", 0.2);
+
+ GremlinManager gremlin = hugeClient.gremlin();
+ System.out.println("==== Path ====");
+ ResultSet resultSet = gremlin.gremlin("g.V().outE().path()").execute();
+ Iterator results = resultSet.iterator();
+ results.forEachRemaining(result -> {
+ System.out.println(result.getObject().getClass());
+ Object object = result.getObject();
+ if (object instanceof Vertex) {
+ System.out.println(((Vertex) object).id());
+ } else if (object instanceof Edge) {
+ System.out.println(((Edge) object).id());
+ } else if (object instanceof Path) {
+ List