diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e1874aec..5610ffe4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-20.04
env:
TRAVIS_DIR: assembly/travis
- COMMIT_ID: 2dd2d7c3fd7cb42cd7338d70a73e04b0c5604487
+ COMMIT_ID: 1d031c5905cbef008dd5fb468576b0e6a9445181
steps:
- name: Install JDK 8
uses: actions/setup-java@v2
@@ -43,7 +43,7 @@ jobs:
- name: Prepare env and service
run: |
- $TRAVIS_DIR/install-hugegraph-from-source.sh $COMMIT_ID | grep -v "Downloading\|Downloaded"
+ $TRAVIS_DIR/install-hugegraph-from-source.sh $COMMIT_ID
- name: Run test
run: |
diff --git a/BCLOUD b/BCLOUD
deleted file mode 100644
index 4a4172a1..00000000
--- a/BCLOUD
+++ /dev/null
@@ -1 +0,0 @@
-BUILD_SUBMITTER -x -e CENTOS6U3 -m baidu/xbu-data/hugegraph-client -c "export MAVEN_HOME=/home/scmtools/buildkit/maven/apache-maven-3.3.9/ && export JAVA_HOME=/home/scmtools/buildkit/java/jdk1.8.0_25/ && export PATH=$JAVA_HOME/bin:$MAVEN_HOME/bin:$PATH && cd baidu/xbu-data/hugegraph-client && sh build.sh && mkdir output && cp BCLOUD ./output/" -u ./
diff --git a/assembly/travis/conf/hugegraph.properties b/assembly/travis/conf/graphs/hugegraph.properties
similarity index 100%
rename from assembly/travis/conf/hugegraph.properties
rename to assembly/travis/conf/graphs/hugegraph.properties
diff --git a/assembly/travis/conf/gremlin-server.yaml b/assembly/travis/conf/gremlin-server.yaml
index 600e631c..69e89902 100644
--- a/assembly/travis/conf/gremlin-server.yaml
+++ b/assembly/travis/conf/gremlin-server.yaml
@@ -6,8 +6,8 @@
scriptEvaluationTimeout: 30000
channelizer: org.apache.tinkerpop.gremlin.server.channel.WsAndHttpChannelizer
+# don't set graphs here, since graphs can now be added dynamically at runtime
graphs: {
- hugegraph: conf/hugegraph.properties
}
scriptEngines: {
gremlin-groovy: {
@@ -17,16 +17,30 @@ scriptEngines: {
org.apache.tinkerpop.gremlin.jsr223.ImportGremlinPlugin: {
classImports: [
java.lang.Math,
+ com.baidu.hugegraph.backend.id.IdGenerator,
com.baidu.hugegraph.type.define.Directions,
- com.baidu.hugegraph.traversal.algorithm.CustomizePathsTraverser,
+ com.baidu.hugegraph.type.define.NodeRole,
+ com.baidu.hugegraph.traversal.algorithm.CollectionPathsTraverser,
+ com.baidu.hugegraph.traversal.algorithm.CountTraverser,
com.baidu.hugegraph.traversal.algorithm.CustomizedCrosspointsTraverser,
+ com.baidu.hugegraph.traversal.algorithm.CustomizePathsTraverser,
com.baidu.hugegraph.traversal.algorithm.FusiformSimilarityTraverser,
com.baidu.hugegraph.traversal.algorithm.HugeTraverser,
+ com.baidu.hugegraph.traversal.algorithm.JaccardSimilarTraverser,
+ com.baidu.hugegraph.traversal.algorithm.KneighborTraverser,
+ com.baidu.hugegraph.traversal.algorithm.KoutTraverser,
+ com.baidu.hugegraph.traversal.algorithm.MultiNodeShortestPathTraverser,
com.baidu.hugegraph.traversal.algorithm.NeighborRankTraverser,
com.baidu.hugegraph.traversal.algorithm.PathsTraverser,
com.baidu.hugegraph.traversal.algorithm.PersonalRankTraverser,
+ com.baidu.hugegraph.traversal.algorithm.SameNeighborTraverser,
com.baidu.hugegraph.traversal.algorithm.ShortestPathTraverser,
+ com.baidu.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser,
com.baidu.hugegraph.traversal.algorithm.SubGraphTraverser,
+ com.baidu.hugegraph.traversal.algorithm.TemplatePathsTraverser,
+ com.baidu.hugegraph.traversal.algorithm.steps.EdgeStep,
+ com.baidu.hugegraph.traversal.algorithm.steps.RepeatEdgeStep,
+ com.baidu.hugegraph.traversal.algorithm.steps.WeightedEdgeStep,
com.baidu.hugegraph.traversal.optimize.Text,
com.baidu.hugegraph.traversal.optimize.TraversalUtil,
com.baidu.hugegraph.util.DateUtil
@@ -40,12 +54,6 @@ scriptEngines: {
}
}
serializers:
- - { className: org.apache.tinkerpop.gremlin.driver.ser.GryoLiteMessageSerializerV1d0,
- config: {
- serializeResultToString: false,
- ioRegistries: [com.baidu.hugegraph.io.HugeGraphIoRegistry]
- }
- }
- { className: org.apache.tinkerpop.gremlin.driver.ser.GraphBinaryMessageSerializerV1,
config: {
serializeResultToString: false,
@@ -72,7 +80,7 @@ serializers:
}
metrics: {
consoleReporter: {enabled: false, interval: 180000},
- csvReporter: {enabled: true, interval: 180000, fileName: /tmp/gremlin-server-metrics.csv},
+ csvReporter: {enabled: false, interval: 180000, fileName: ./metrics/gremlin-server-metrics.csv},
jmxReporter: {enabled: false},
slf4jReporter: {enabled: false, interval: 180000},
gangliaReporter: {enabled: false, interval: 180000, addressingMode: MULTICAST},
diff --git a/assembly/travis/conf/rest-server.properties b/assembly/travis/conf/rest-server.properties
index a9ea865b..d44ff6bc 100644
--- a/assembly/travis/conf/rest-server.properties
+++ b/assembly/travis/conf/rest-server.properties
@@ -3,10 +3,39 @@ restserver.url=http://127.0.0.1:8080
# gremlin server url, need to be consistent with host and port in gremlin-server.yaml
#gremlinserver.url=http://127.0.0.1:8182
-# graphs list with pair NAME:CONF_PATH
-graphs=[hugegraph:conf/hugegraph.properties]
+graphs=./conf/graphs
-# authentication
+# The maximum thread ratio for batch writing, only takes effect when batch.max_write_threads is 0
+batch.max_write_ratio=80
+batch.max_write_threads=0
+
+# authentication configs
+# choose 'com.baidu.hugegraph.auth.StandardAuthenticator' or 'com.baidu.hugegraph.auth.ConfigAuthenticator'
auth.authenticator=com.baidu.hugegraph.auth.StandardAuthenticator
+
+# for StandardAuthenticator mode
+#auth.graph_store=hugegraph
+# auth client config
+#auth.remote_url=127.0.0.1:8899,127.0.0.1:8898,127.0.0.1:8897
+
+# for ConfigAuthenticator mode
#auth.admin_token=
#auth.user_tokens=[]
+
+# rpc group configs of multi graph servers
+# rpc server configs
+rpc.server_host=127.0.0.1
+rpc.server_port=8090
+#rpc.server_timeout=30
+
+# rpc client configs (e.g. enabled to keep cache consistency)
+rpc.remote_url=127.0.0.1:8090
+#rpc.client_connect_timeout=20
+#rpc.client_reconnect_period=10
+#rpc.client_read_timeout=40
+#rpc.client_retries=3
+#rpc.client_load_balancer=consistentHash
+
+# lightweight load balancing (beta)
+server.id=server-1
+server.role=master
diff --git a/assembly/travis/install-hugegraph-from-source.sh b/assembly/travis/install-hugegraph-from-source.sh
index 31d7e89c..c594320e 100755
--- a/assembly/travis/install-hugegraph-from-source.sh
+++ b/assembly/travis/install-hugegraph-from-source.sh
@@ -9,26 +9,34 @@ fi
COMMIT_ID=$1
HUGEGRAPH_GIT_URL="https://github.com/hugegraph/hugegraph.git"
+GIT_DIR=hugegraph
+# download code and compile
git clone --depth 100 ${HUGEGRAPH_GIT_URL}
-cd hugegraph
+cd "${GIT_DIR}"
git checkout ${COMMIT_ID}
mvn package -DskipTests
-mv hugegraph-*.tar.gz ../
+
+TAR=$(echo hugegraph-*.tar.gz)
+tar -zxvf "${TAR}" -C ../
cd ../
-rm -rf hugegraph
-tar -zxvf hugegraph-*.tar.gz
+rm -rf "${GIT_DIR}"
+HTTP_SERVER_DIR=$(echo hugegraph-*)
HTTPS_SERVER_DIR="hugegraph_https"
-mkdir ${HTTPS_SERVER_DIR}
-cp -r hugegraph-*/. ${HTTPS_SERVER_DIR}
-cd "$(find hugegraph-* | head -1)"
-cp ../$TRAVIS_DIR/conf/* conf
+
+cp -r "${HTTP_SERVER_DIR}" "${HTTPS_SERVER_DIR}"
+
+# copy config (auth options) just for the http server (must keep the trailing '/.')
+cp -rf "${TRAVIS_DIR}"/conf/. "${HTTP_SERVER_DIR}"/conf/
+
# start HugeGraphServer with http protocol
+cd "${HTTP_SERVER_DIR}"
echo -e "pa" | bin/init-store.sh || exit 1
bin/start-hugegraph.sh || exit 1
-cd ../${HTTPS_SERVER_DIR}
+# config options for https server
+cd ../"${HTTPS_SERVER_DIR}"
REST_SERVER_CONFIG="conf/rest-server.properties"
GREMLIN_SERVER_CONFIG="conf/gremlin-server.yaml"
sed -i "s?http://127.0.0.1:8080?https://127.0.0.1:8443?g" "$REST_SERVER_CONFIG"
diff --git a/assembly/travis/install-hugegraph-from-tar.sh b/assembly/travis/install-hugegraph-from-tar.sh
deleted file mode 100755
index 6c546ddb..00000000
--- a/assembly/travis/install-hugegraph-from-tar.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/bash
-
-set -ev
-
-TRAVIS_DIR=`dirname $0`
-
-if [ $# -ne 1 ]; then
- echo "Must pass base branch name of pull request"
- exit 1
-fi
-
-CLIENT_BRANCH=$1
-HUGEGRAPH_BRANCH=$CLIENT_BRANCH
-
-HUGEGRAPH_GIT_URL="https://github.com/hugegraph/hugegraph.git"
-
-git clone $HUGEGRAPH_GIT_URL
-
-cd hugegraph
-
-git checkout $HUGEGRAPH_BRANCH
-
-mvn package -DskipTests
-
-mv hugegraph-*.tar.gz ../
-
-cd ../
-
-rm -rf hugegraph
-
-tar -zxvf hugegraph-*.tar.gz
-
-HTTPS_SERVER_DIR="hugegraph_https"
-
-mkdir $HTTPS_SERVER_DIR
-
-cp -r hugegraph-*/. $HTTPS_SERVER_DIR
-
-cd hugegraph-*
-
-cp ../$TRAVIS_DIR/conf/* conf
-
-echo -e "pa" | bin/init-store.sh
-
-bin/start-hugegraph.sh
-
-cd ../
-
-cd $HTTPS_SERVER_DIR
-
-REST_SERVER_CONFIG="conf/rest-server.properties"
-
-GREMLIN_SERVER_CONFIG="conf/gremlin-server.yaml"
-
-sed -i "s?http://127.0.0.1:8080?https://127.0.0.1:8443?g" "$REST_SERVER_CONFIG"
-
-sed -i "s/#port: 8182/port: 8282/g" "$GREMLIN_SERVER_CONFIG"
-
-echo "ssl.keystore_password=hugegraph" >> $REST_SERVER_CONFIG
-
-echo "ssl.keystore_file=conf/hugegraph-server.keystore" >> $REST_SERVER_CONFIG
-
-echo "gremlinserver.url=http://127.0.0.1:8282" >> $REST_SERVER_CONFIG
-
-bin/init-store.sh
-
-bin/start-hugegraph.sh
-
-cd ../
diff --git a/pom.xml b/pom.xml
index 58775d09..e0184766 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
<groupId>com.baidu.hugegraph</groupId>
<artifactId>hugegraph-client</artifactId>
- <version>2.0.0</version>
+ <version>2.0.1</version>
<packaging>jar</packaging>
<name>hugegraph-client</name>
@@ -113,7 +113,7 @@
- 1.9.4.0
+ 2.0.1.0
diff --git a/src/main/java/com/baidu/hugegraph/api/graphs/GraphsAPI.java b/src/main/java/com/baidu/hugegraph/api/graphs/GraphsAPI.java
index 02d35e5f..f33d3ea4 100644
--- a/src/main/java/com/baidu/hugegraph/api/graphs/GraphsAPI.java
+++ b/src/main/java/com/baidu/hugegraph/api/graphs/GraphsAPI.java
@@ -22,6 +22,13 @@
import java.util.List;
import java.util.Map;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
+
+import org.apache.commons.lang3.StringUtils;
+
import com.baidu.hugegraph.api.API;
import com.baidu.hugegraph.client.RestClient;
import com.baidu.hugegraph.exception.InvalidResponseException;
@@ -33,6 +40,13 @@
public class GraphsAPI extends API {
+ private static final String DELIMITER = "/";
+ private static final String MODE = "mode";
+ private static final String GRAPH_READ_MODE = "graph_read_mode";
+ private static final String CLEAR = "clear";
+
+ private static final String CONFIRM_MESSAGE = "confirm_message";
+
public GraphsAPI(RestClient client) {
super(client);
this.path(this.type());
@@ -43,6 +57,21 @@ protected String type() {
return HugeType.GRAPHS.string();
}
+ @SuppressWarnings("unchecked")
+ public Map create(String name, String cloneGraphName,
+ String configText) {
+ this.client.checkApiVersion("0.67", "dynamic graph add");
+ MultivaluedMap headers = new MultivaluedHashMap<>();
+ headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON);
+ Map params = null;
+ if (StringUtils.isNotEmpty(cloneGraphName)) {
+ params = ImmutableMap.of("clone_graph_name", cloneGraphName);
+ }
+ RestResult result = this.client.post(joinPath(this.path(), name),
+ configText, headers, params);
+ return result.readObject(Map.class);
+ }
+
@SuppressWarnings("unchecked")
public Map get(String name) {
RestResult result = this.client.get(this.path(), name);
@@ -54,19 +83,28 @@ public List list() {
return result.readList(this.type(), String.class);
}
+ public void clear(String graph, String message) {
+ this.client.delete(joinPath(this.path(), graph, CLEAR),
+ ImmutableMap.of(CONFIRM_MESSAGE, message));
+ }
+
+ public void drop(String graph, String message) {
+ this.client.checkApiVersion("0.67", "dynamic graph delete");
+ this.client.delete(joinPath(this.path(), graph),
+ ImmutableMap.of(CONFIRM_MESSAGE, message));
+ }
+
public void mode(String graph, GraphMode mode) {
- String path = String.join("/", this.path(), graph);
// NOTE: Must provide id for PUT. If use "graph/mode", "/" will
// be encoded to "%2F". So use "mode" here although inaccurate.
- this.client.put(path, "mode", mode);
+ this.client.put(joinPath(this.path(), graph, MODE), null, mode);
}
public GraphMode mode(String graph) {
- String path = String.join("/", this.path(), graph);
- RestResult result = this.client.get(path, "mode");
+ RestResult result = this.client.get(joinPath(this.path(), graph), MODE);
@SuppressWarnings("unchecked")
Map mode = result.readObject(Map.class);
- String value = mode.get("mode");
+ String value = mode.get(MODE);
if (value == null) {
throw new InvalidResponseException(
"Invalid response, expect 'mode' in response");
@@ -81,20 +119,20 @@ public GraphMode mode(String graph) {
public void readMode(String graph, GraphReadMode readMode) {
this.client.checkApiVersion("0.59", "graph read mode");
- String path = String.join("/", this.path(), graph);
// NOTE: Must provide id for PUT. If use "graph/graph_read_mode", "/"
// will be encoded to "%2F". So use "graph_read_mode" here although
// inaccurate.
- this.client.put(path, "graph_read_mode", readMode);
+ this.client.put(joinPath(this.path(), graph, GRAPH_READ_MODE),
+ null, readMode);
}
public GraphReadMode readMode(String graph) {
this.client.checkApiVersion("0.59", "graph read mode");
- String path = String.join("/", this.path(), graph);
- RestResult result = this.client.get(path, "graph_read_mode");
+ RestResult result = this.client.get(joinPath(this.path(), graph),
+ GRAPH_READ_MODE);
@SuppressWarnings("unchecked")
Map readMode = result.readObject(Map.class);
- String value = readMode.get("graph_read_mode");
+ String value = readMode.get(GRAPH_READ_MODE);
if (value == null) {
throw new InvalidResponseException(
"Invalid response, expect 'graph_read_mode' in response");
@@ -107,9 +145,11 @@ public GraphReadMode readMode(String graph) {
}
}
- public void clear(String graph, String message) {
- String path = String.join("/", this.path(), graph, "clear");
- this.client.delete(path,
- ImmutableMap.of("confirm_message", message));
+ private static String joinPath(String path, String graph) {
+ return String.join(DELIMITER, path, graph);
+ }
+
+ private static String joinPath(String path, String graph, String action) {
+ return String.join(DELIMITER, path, graph, action);
}
}
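
For context, the new joinPath helpers only join segments with "/"; assuming the API base path resolves to "graphs" (not shown in this diff), the methods above hit paths such as graphs/{name}, graphs/{name}/clear and graphs/{name}/mode. A tiny standalone sketch of that joining behavior:

```java
public class JoinPathSketch {

    private static final String DELIMITER = "/";

    // Mirrors the two new private helpers in GraphsAPI
    static String joinPath(String path, String graph) {
        return String.join(DELIMITER, path, graph);
    }

    static String joinPath(String path, String graph, String action) {
        return String.join(DELIMITER, path, graph, action);
    }

    public static void main(String[] args) {
        // Assuming the API base path resolves to "graphs"
        System.out.println(joinPath("graphs", "hugegraph2"));           // graphs/hugegraph2
        System.out.println(joinPath("graphs", "hugegraph2", "clear"));  // graphs/hugegraph2/clear
    }
}
```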
diff --git a/src/main/java/com/baidu/hugegraph/driver/GraphsManager.java b/src/main/java/com/baidu/hugegraph/driver/GraphsManager.java
index 3b5dda04..bbb933ad 100644
--- a/src/main/java/com/baidu/hugegraph/driver/GraphsManager.java
+++ b/src/main/java/com/baidu/hugegraph/driver/GraphsManager.java
@@ -35,6 +35,19 @@ public GraphsManager(RestClient client) {
this.graphsAPI = new GraphsAPI(client);
}
+ public Map createGraph(String name, String configText) {
+ return this.graphsAPI.create(name, null, configText);
+ }
+
+ public Map cloneGraph(String name, String cloneGraphName) {
+ return this.graphsAPI.create(name, cloneGraphName, null);
+ }
+
+ public Map cloneGraph(String name, String cloneGraphName,
+ String configText) {
+ return this.graphsAPI.create(name, cloneGraphName, configText);
+ }
+
public Map getGraph(String graph) {
return this.graphsAPI.get(graph);
}
@@ -43,6 +56,14 @@ public List listGraph() {
return this.graphsAPI.list();
}
+ public void clearGraph(String graph, String message) {
+ this.graphsAPI.clear(graph, message);
+ }
+
+ public void dropGraph(String graph, String message) {
+ this.graphsAPI.drop(graph, message);
+ }
+
public void mode(String graph, GraphMode mode) {
this.graphsAPI.mode(graph, mode);
}
@@ -58,8 +79,4 @@ public void readMode(String graph, GraphReadMode readMode) {
public GraphReadMode readMode(String graph) {
return this.graphsAPI.readMode(graph);
}
-
- public void clear(String graph, String message) {
- this.graphsAPI.clear(graph, message);
- }
}
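
A minimal usage sketch of the new GraphsManager entry points, not part of this patch: it assumes a running HugeGraphServer (api version >= 0.67) on the URL shown, the usual HugeClientBuilder#build() and HugeClient#graphs() accessors, and an illustrative config file path.

```java
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.apache.commons.io.FileUtils;

import com.baidu.hugegraph.driver.HugeClient;

public class GraphsManagerSketch {

    public static void main(String[] args) throws Exception {
        // Assumed: a running HugeGraphServer (api >= 0.67) on this URL
        HugeClient client = HugeClient.builder("http://127.0.0.1:8080", "hugegraph")
                                      .build();

        // Create a brand-new graph from the text of a backend config file
        // (the file path here is illustrative only)
        String config = FileUtils.readFileToString(
                        new File("conf/hugegraph2.properties"),
                        StandardCharsets.UTF_8);
        Map<?, ?> created = client.graphs().createGraph("hugegraph2", config);
        System.out.println(created); // e.g. {name=hugegraph2, backend=rocksdb}

        // Clone "hugegraph3" from the existing "hugegraph", reusing its config
        client.graphs().cloneGraph("hugegraph3", "hugegraph");

        // Clear schema/data, then drop the graphs entirely
        client.graphs().clearGraph("hugegraph2", "I'm sure to delete all data");
        client.graphs().dropGraph("hugegraph2", "I'm sure to drop the graph");
        client.graphs().dropGraph("hugegraph3", "I'm sure to drop the graph");

        client.close();
    }
}
```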
diff --git a/src/main/java/com/baidu/hugegraph/driver/HugeClient.java b/src/main/java/com/baidu/hugegraph/driver/HugeClient.java
index 45a41f0b..6f214e4e 100644
--- a/src/main/java/com/baidu/hugegraph/driver/HugeClient.java
+++ b/src/main/java/com/baidu/hugegraph/driver/HugeClient.java
@@ -34,6 +34,8 @@ public class HugeClient implements Closeable {
ClientVersion.check();
}
private final RestClient client;
+ private final boolean borrowedClient;
+
private VersionManager version;
private GraphsManager graphs;
private SchemaManager schema;
@@ -47,6 +49,7 @@ public class HugeClient implements Closeable {
private MetricsManager metrics;
public HugeClient(HugeClientBuilder builder) {
+ this.borrowedClient = false;
try {
this.client = new RestClient(builder.url(),
builder.username(),
@@ -57,7 +60,8 @@ public HugeClient(HugeClientBuilder builder) {
builder.trustStoreFile(),
builder.trustStorePassword());
} catch (ProcessingException e) {
- throw new ClientException("Failed to connect url '%s'", builder.url());
+ throw new ClientException("Failed to connect url '%s'",
+ builder.url());
}
try {
this.initManagers(this.client, builder.graph());
@@ -67,13 +71,21 @@ public HugeClient(HugeClientBuilder builder) {
}
}
+ public HugeClient(HugeClient client, String graph) {
+ this.borrowedClient = true;
+ this.client = client.client;
+ this.initManagers(this.client, graph);
+ }
+
public static HugeClientBuilder builder(String url, String graph) {
return new HugeClientBuilder(url, graph);
}
@Override
public void close() {
- this.client.close();
+ if (!this.borrowedClient) {
+ this.client.close();
+ }
}
private void initManagers(RestClient client, String graph) {
@@ -97,7 +109,7 @@ private void initManagers(RestClient client, String graph) {
private void checkServerApiVersion() {
VersionUtil.Version apiVersion = VersionUtil.Version.of(
this.version.getApiVersion());
- VersionUtil.check(apiVersion, "0.38", "0.67",
+ VersionUtil.check(apiVersion, "0.38", "0.68",
"hugegraph-api in server");
this.client.apiVersion(apiVersion);
}
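
The new borrowedClient flag lets a HugeClient built from another client share its RestClient and skip closing it, which is what the GraphsApiTest below relies on via new HugeClient(baseClient(), GRAPH2). A minimal sketch of that usage, under the same assumptions as the previous example:

```java
import com.baidu.hugegraph.driver.HugeClient;

public class BorrowedClientSketch {

    public static void main(String[] args) {
        HugeClient base = HugeClient.builder("http://127.0.0.1:8080", "hugegraph")
                                    .build();

        // Reuse base's underlying RestClient, but point the managers at
        // "hugegraph2"; this second instance is marked borrowedClient = true
        HugeClient graph2 = new HugeClient(base, "hugegraph2");
        System.out.println(graph2.schema().getPropertyKeys());

        // Closing the borrowed client must not close the shared RestClient ...
        graph2.close();
        // ... so the base client keeps working until it is closed itself
        System.out.println(base.schema().getPropertyKeys());
        base.close();
    }
}
```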
diff --git a/src/test/java/com/baidu/hugegraph/api/ApiTestSuite.java b/src/test/java/com/baidu/hugegraph/api/ApiTestSuite.java
index f81cfa8a..62329dba 100644
--- a/src/test/java/com/baidu/hugegraph/api/ApiTestSuite.java
+++ b/src/test/java/com/baidu/hugegraph/api/ApiTestSuite.java
@@ -67,6 +67,7 @@
TaskApiTest.class,
JobApiTest.class,
RestoreApiTest.class,
+ GraphsApiTest.class,
CommonTraverserApiTest.class,
KoutApiTest.class,
diff --git a/src/test/java/com/baidu/hugegraph/api/GraphsApiTest.java b/src/test/java/com/baidu/hugegraph/api/GraphsApiTest.java
new file mode 100644
index 00000000..b9c9cfbe
--- /dev/null
+++ b/src/test/java/com/baidu/hugegraph/api/GraphsApiTest.java
@@ -0,0 +1,330 @@
+/*
+ * Copyright 2017 HugeGraph Authors
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.baidu.hugegraph.api;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Test;
+
+import com.baidu.hugegraph.driver.HugeClient;
+import com.baidu.hugegraph.driver.SchemaManager;
+import com.baidu.hugegraph.rest.ClientException;
+import com.baidu.hugegraph.structure.graph.Edge;
+import com.baidu.hugegraph.structure.graph.Vertex;
+import com.baidu.hugegraph.structure.gremlin.ResultSet;
+import com.baidu.hugegraph.testutil.Assert;
+import com.google.common.collect.ImmutableSet;
+
+public class GraphsApiTest extends BaseApiTest {
+
+ private static final String GRAPH2 = "hugegraph2";
+ private static final String CONFIG2_PATH =
+ "src/test/resources/hugegraph-create.properties";
+
+ private static final String GRAPH3 = "hugegraph3";
+ private static final String CONFIG3_PATH =
+ "src/test/resources/hugegraph-clone.properties";
+
+ @Override
+ @After
+ public void teardown() {
+ for (String g : ImmutableSet.of(GRAPH2, GRAPH3)) {
+ try {
+ graphsAPI.get(g);
+ } catch (Exception ignored) {
+ continue;
+ }
+ graphsAPI.drop(g, "I'm sure to drop the graph");
+ }
+ }
+
+ @Test
+ public void testCreateAndDropGraph() {
+ int initialGraphNumber = graphsAPI.list().size();
+
+ // Create new graph dynamically
+ String config;
+ try {
+ config = FileUtils.readFileToString(new File(CONFIG2_PATH),
+ StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ throw new ClientException("Failed to read config file: %s",
+ CONFIG2_PATH);
+ }
+ Map result = graphsAPI.create(GRAPH2, null, config);
+ Assert.assertEquals(2, result.size());
+ Assert.assertEquals(GRAPH2, result.get("name"));
+ Assert.assertEquals("rocksdb", result.get("backend"));
+
+ Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());
+
+ HugeClient client = new HugeClient(baseClient(), GRAPH2);
+ // Insert graph schema and data
+ initPropertyKey(client);
+ initVertexLabel(client);
+ initEdgeLabel(client);
+
+ List<Vertex> vertices = new ArrayList<>(100);
+ for (int i = 0; i < 100; i++) {
+ Vertex vertex = new Vertex("person").property("name", "person" + i)
+ .property("city", "Beijing")
+ .property("age", 19);
+ vertices.add(vertex);
+ }
+ vertices = client.graph().addVertices(vertices);
+
+ List<Edge> edges = new ArrayList<>(100);
+ for (int i = 0; i < 100; i++) {
+ Edge edge = new Edge("knows").source(vertices.get(i))
+ .target(vertices.get((i + 1) % 100))
+ .property("date", "2016-01-10");
+ edges.add(edge);
+ }
+ client.graph().addEdges(edges, false);
+
+ // Query vertex and edge counts from the newly created graph
+ ResultSet resultSet = client.gremlin().gremlin("g.V().count()")
+ .execute();
+ Assert.assertEquals(100, resultSet.iterator().next().getInt());
+
+ resultSet = client.gremlin().gremlin("g.E().count()").execute();
+ Assert.assertEquals(100, resultSet.iterator().next().getInt());
+
+ // Clear graph schema and data from the newly created graph
+ graphsAPI.clear(GRAPH2, "I'm sure to delete all data");
+
+ resultSet = client.gremlin().gremlin("g.V().count()").execute();
+ Assert.assertEquals(0, resultSet.iterator().next().getInt());
+
+ resultSet = client.gremlin().gremlin("g.E().count()").execute();
+ Assert.assertEquals(0, resultSet.iterator().next().getInt());
+
+ Assert.assertTrue(client.schema().getPropertyKeys().isEmpty());
+
+ Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());
+
+ // Remove the newly created graph dynamically
+ graphsAPI.drop(GRAPH2, "I'm sure to drop the graph");
+
+ Assert.assertEquals(initialGraphNumber, graphsAPI.list().size());
+ }
+
+ @Test
+ public void testCloneAndDropGraph() {
+ int initialGraphNumber = graphsAPI.list().size();
+
+ // Clone a new graph from an existing graph dynamically
+ String config;
+ try {
+ config = FileUtils.readFileToString(new File(CONFIG3_PATH),
+ StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ throw new ClientException("Failed to read config file: %s",
+ CONFIG3_PATH);
+ }
+ Map result = graphsAPI.create(GRAPH3, "hugegraph",
+ config);
+ Assert.assertEquals(2, result.size());
+ Assert.assertEquals(GRAPH3, result.get("name"));
+ Assert.assertEquals("rocksdb", result.get("backend"));
+
+ Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());
+
+ HugeClient client = new HugeClient(baseClient(), GRAPH3);
+ // Insert graph schema and data
+ initPropertyKey(client);
+ initVertexLabel(client);
+ initEdgeLabel(client);
+
+ List<Vertex> vertices = new ArrayList<>(100);
+ for (int i = 0; i < 100; i++) {
+ Vertex vertex = new Vertex("person").property("name", "person" + i)
+ .property("city", "Beijing")
+ .property("age", 19);
+ vertices.add(vertex);
+ }
+ vertices = client.graph().addVertices(vertices);
+
+ List<Edge> edges = new ArrayList<>(100);
+ for (int i = 0; i < 100; i++) {
+ Edge edge = new Edge("knows").source(vertices.get(i))
+ .target(vertices.get((i + 1) % 100))
+ .property("date", "2016-01-10");
+ edges.add(edge);
+ }
+ client.graph().addEdges(edges, false);
+
+ // Query vertex and edge counts from the newly created graph
+ ResultSet resultSet = client.gremlin().gremlin("g.V().count()")
+ .execute();
+ Assert.assertEquals(100, resultSet.iterator().next().getInt());
+
+ resultSet = client.gremlin().gremlin("g.E().count()").execute();
+ Assert.assertEquals(100, resultSet.iterator().next().getInt());
+
+ // Clear graph schema and data from the newly created graph
+ graphsAPI.clear(GRAPH3, "I'm sure to delete all data");
+
+ resultSet = client.gremlin().gremlin("g.V().count()").execute();
+ Assert.assertEquals(0, resultSet.iterator().next().getInt());
+
+ resultSet = client.gremlin().gremlin("g.E().count()").execute();
+ Assert.assertEquals(0, resultSet.iterator().next().getInt());
+
+ Assert.assertTrue(client.schema().getPropertyKeys().isEmpty());
+
+ Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());
+
+ // Remove the newly created graph dynamically
+ graphsAPI.drop(GRAPH3, "I'm sure to drop the graph");
+
+ Assert.assertEquals(initialGraphNumber, graphsAPI.list().size());
+ }
+
+ @Test
+ public void testCloneAndDropGraphWithoutConfig() {
+ int initialGraphNumber = graphsAPI.list().size();
+
+ // Clone a new graph from an existing graph dynamically
+ String config = null;
+ Map result = graphsAPI.create(GRAPH3, "hugegraph",
+ config);
+ Assert.assertEquals(2, result.size());
+ Assert.assertEquals(GRAPH3, result.get("name"));
+ Assert.assertEquals("rocksdb", result.get("backend"));
+
+ Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());
+
+ HugeClient client = new HugeClient(baseClient(), GRAPH3);
+ // Insert graph schema and data
+ initPropertyKey(client);
+ initVertexLabel(client);
+ initEdgeLabel(client);
+
+ List<Vertex> vertices = new ArrayList<>(100);
+ for (int i = 0; i < 100; i++) {
+ Vertex vertex = new Vertex("person").property("name", "person" + i)
+ .property("city", "Beijing")
+ .property("age", 19);
+ vertices.add(vertex);
+ }
+ vertices = client.graph().addVertices(vertices);
+
+ List<Edge> edges = new ArrayList<>(100);
+ for (int i = 0; i < 100; i++) {
+ Edge edge = new Edge("knows").source(vertices.get(i))
+ .target(vertices.get((i + 1) % 100))
+ .property("date", "2016-01-10");
+ edges.add(edge);
+ }
+ client.graph().addEdges(edges, false);
+
+ // Query vertex and edge counts from the newly created graph
+ ResultSet resultSet = client.gremlin().gremlin("g.V().count()")
+ .execute();
+ Assert.assertEquals(100, resultSet.iterator().next().getInt());
+
+ resultSet = client.gremlin().gremlin("g.E().count()").execute();
+ Assert.assertEquals(100, resultSet.iterator().next().getInt());
+
+ // Clear graph schema and data from the newly created graph
+ graphsAPI.clear(GRAPH3, "I'm sure to delete all data");
+
+ resultSet = client.gremlin().gremlin("g.V().count()").execute();
+ Assert.assertEquals(0, resultSet.iterator().next().getInt());
+
+ resultSet = client.gremlin().gremlin("g.E().count()").execute();
+ Assert.assertEquals(0, resultSet.iterator().next().getInt());
+
+ Assert.assertTrue(client.schema().getPropertyKeys().isEmpty());
+
+ Assert.assertEquals(initialGraphNumber + 1, graphsAPI.list().size());
+
+ // Remove the newly created graph dynamically
+ graphsAPI.drop(GRAPH3, "I'm sure to drop the graph");
+
+ Assert.assertEquals(initialGraphNumber, graphsAPI.list().size());
+ }
+
+ protected static void initPropertyKey(HugeClient client) {
+ SchemaManager schema = client.schema();
+ schema.propertyKey("name").asText().ifNotExist().create();
+ schema.propertyKey("age").asInt().ifNotExist().create();
+ schema.propertyKey("city").asText().ifNotExist().create();
+ schema.propertyKey("lang").asText().ifNotExist().create();
+ schema.propertyKey("date").asDate().ifNotExist().create();
+ schema.propertyKey("price").asInt().ifNotExist().create();
+ schema.propertyKey("weight").asDouble().ifNotExist().create();
+ }
+
+ protected static void initVertexLabel(HugeClient client) {
+ SchemaManager schema = client.schema();
+
+ schema.vertexLabel("person")
+ .properties("name", "age", "city")
+ .primaryKeys("name")
+ .nullableKeys("city")
+ .ifNotExist()
+ .create();
+
+ schema.vertexLabel("software")
+ .properties("name", "lang", "price")
+ .primaryKeys("name")
+ .nullableKeys("price")
+ .ifNotExist()
+ .create();
+
+ schema.vertexLabel("book")
+ .useCustomizeStringId()
+ .properties("name", "price")
+ .nullableKeys("price")
+ .ifNotExist()
+ .create();
+ }
+
+ protected static void initEdgeLabel(HugeClient client) {
+ SchemaManager schema = client.schema();
+
+ schema.edgeLabel("knows")
+ .sourceLabel("person")
+ .targetLabel("person")
+ .multiTimes()
+ .properties("date", "city")
+ .sortKeys("date")
+ .nullableKeys("city")
+ .ifNotExist()
+ .create();
+
+ schema.edgeLabel("created")
+ .sourceLabel("person")
+ .targetLabel("software")
+ .properties("date", "city")
+ .nullableKeys("city")
+ .ifNotExist()
+ .create();
+ }
+}
diff --git a/src/test/resources/hugegraph-clone.properties b/src/test/resources/hugegraph-clone.properties
new file mode 100644
index 00000000..712b6636
--- /dev/null
+++ b/src/test/resources/hugegraph-clone.properties
@@ -0,0 +1,3 @@
+store=hugegraph3
+rocksdb.data_path=./hg3
+rocksdb.wal_path=./hg3
diff --git a/src/test/resources/hugegraph-create.properties b/src/test/resources/hugegraph-create.properties
new file mode 100644
index 00000000..cc022b02
--- /dev/null
+++ b/src/test/resources/hugegraph-create.properties
@@ -0,0 +1,6 @@
+gremlin.graph=com.baidu.hugegraph.auth.HugeFactoryAuthProxy
+backend=rocksdb
+serializer=binary
+store=hugegraph2
+rocksdb.data_path=./hg2
+rocksdb.wal_path=./hg2