Skip to content

Commit

Permalink
Add travis api-test
Browse files Browse the repository at this point in the history
Change-Id: I01d10dc092f2f2147155bc2b523b49e741d8d571
  • Loading branch information
Linary committed Jan 7, 2019
1 parent 7164a45 commit 8652bd9
Show file tree
Hide file tree
Showing 10 changed files with 113 additions and 11 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ before_script:
- $TRAVIS_DIR/install-backend.sh

script:
- mvn test -P core-test,$BACKEND
- mvn test -P core-test,$BACKEND &&
mvn package -DskipTests && $TRAVIS_DIR/start-server.sh && mvn test -P api-test,$BACKEND
- |
if [ "$BACKEND" == "memory" ]; then
mvn test -P unit-test
Expand Down
2 changes: 1 addition & 1 deletion hugegraph-dist/src/assembly/travis/install-cassandra.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ fi
# decompress cassandra
cp $HOME/downloads/${CASSA_TAR} ${CASSA_TAR} && tar xzf ${CASSA_TAR}

# Using tmpfs for the Cassandra data directory reduces travis test runtime by
# Using tmpfs for the Cassandra data directory reduces Travis test runtime
sudo mkdir /mnt/ramdisk
sudo mount -t tmpfs -o size=1024m tmpfs /mnt/ramdisk
sudo ln -s /mnt/ramdisk $CASSA_PACKAGE/data
Expand Down
2 changes: 1 addition & 1 deletion hugegraph-dist/src/assembly/travis/install-hbase.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ fi
# decompress hbase
sudo cp $HOME/downloads/${HBASE_TAR} ${HBASE_TAR} && tar xzf ${HBASE_TAR}

# Using tmpfs for the Hbase data directory reduces travis test runtime by
# Using tmpfs for the HBase data directory reduces Travis test runtime
sudo mkdir /mnt/ramdisk
sudo mount -t tmpfs -o size=1024m tmpfs /mnt/ramdisk
sudo ln -s /mnt/ramdisk /tmp/hbase
Expand Down
22 changes: 22 additions & 0 deletions hugegraph-dist/src/assembly/travis/start-server.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#!/bin/bash

set -ev

# Resolve the project version from the POM so the distribution directory
# (hugegraph-<version>) can be located without hard-coding it.
VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
BASE_DIR="hugegraph-${VERSION}"
BIN="${BASE_DIR}/bin"
CONF="${BASE_DIR}/conf/hugegraph.properties"

# Map each supported backend to the serializer it requires.
declare -A backend_serializer_map=(["memory"]="text" ["cassandra"]="cassandra" \
                                   ["scylladb"]="scylladb" ["mysql"]="mysql" \
                                   ["hbase"]="hbase" ["rocksdb"]="binary")

SERIALIZER=${backend_serializer_map[$BACKEND]}
# Fail fast on an unknown/unset backend instead of silently writing an
# empty serializer into the config and failing much later at startup.
if [ -z "$SERIALIZER" ]; then
    echo "Unknown backend: '$BACKEND'" >&2
    exit 1
fi

# Point the server config at the backend/serializer pair under test.
sed -i "s/backend=.*/backend=$BACKEND/" "$CONF"
sed -i "s/serializer=.*/serializer=$SERIALIZER/" "$CONF"

# Make schema deletion synchronous so API tests can rely on immediate removal.
echo "schema.sync_deletion=true" >> "$CONF"

"$BIN"/init-store.sh && "$BIN"/start-hugegraph.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
Expand Down
12 changes: 12 additions & 0 deletions hugegraph-test/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,18 @@
</includes>
</configuration>
</execution>
<execution>
<id>api-test</id>
<configuration>
<testSourceDirectory>${basedir}/src/main/java/
</testSourceDirectory>
<testClassesDirectory>${basedir}/target/classes/
</testClassesDirectory>
<includes>
<include>**/ApiTestSuite.java</include>
</includes>
</configuration>
</execution>
<execution>
<id>tinkerpop-structure-test</id>
<configuration>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@

package com.baidu.hugegraph.api;

import org.apache.commons.configuration.ConfigurationException;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
Expand All @@ -40,7 +39,7 @@
public class ApiTestSuite {

@BeforeClass
public static void initEnv() throws ConfigurationException {
// Runs once before any suite member: registers the available backend
// implementations via RegisterUtil so tests can open the configured graph.
public static void initEnv() {
RegisterUtil.registerBackends();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
Expand Down Expand Up @@ -53,10 +55,8 @@ public class BaseApiTest {
private static final String SCHEMA_PKS = "/schema/propertykeys";
private static final String SCHEMA_VLS = "/schema/vertexlabels";
private static final String SCHEMA_ELS = "/schema/edgelabels";
@SuppressWarnings("unused")
private static final String SCHEMA_ILS = "/schema/indexlabels";
private static final String GRAPH_VERTEX = "/graph/vertices";
@SuppressWarnings("unused")
private static final String GRAPH_EDGE = "/graph/edges";

private static RestClient client;
Expand Down Expand Up @@ -329,6 +329,50 @@ protected static String getVertexId(String label, String key, String value)
return (String) list.get(0).get("id");
}

/**
 * Removes all graph data via the REST API: edges first, then vertices,
 * so vertex deletion never races against still-attached edges.
 */
protected static void clearGraph() {
    clearByField(GRAPH_EDGE, "id");
    clearByField(GRAPH_VERTEX, "id");
}

/**
 * Removes all schema elements via the REST API in reverse dependency
 * order: index labels, edge labels, vertex labels, then property keys.
 */
protected static void clearSchema() {
    clearByField(SCHEMA_ILS, "name");
    clearByField(SCHEMA_ELS, "name");
    clearByField(SCHEMA_VLS, "name");
    clearByField(SCHEMA_PKS, "name");
}

/**
 * Lists every element under the given URL suffix and deletes each one,
 * identified by the given JSON field ("id" for graph elements, "name"
 * for schema elements). Extracted because clearGraph/clearSchema
 * previously duplicated this logic verbatim.
 *
 * @param urlSuffix path suffix under URL_PREFIX, e.g. "/graph/edges"
 * @param field     JSON field whose value identifies the element to delete
 * @throws HugeException if the listing request does not return HTTP 200
 */
private static void clearByField(String urlSuffix, String field) {
    String path = URL_PREFIX + urlSuffix;
    // The element type name is the last path segment, e.g. "vertices".
    String type = urlSuffix.substring(urlSuffix.lastIndexOf('/') + 1);
    Response r = client.get(path);
    if (r.getStatus() != 200) {
        throw new HugeException("Failed to list " + type);
    }
    String content = r.readEntity(String.class);
    List<Map> list = readList(content, type, Map.class);
    for (Map element : list) {
        client.delete(path, (String) element.get(field));
    }
}

protected static String parseId(String content) throws IOException {
Map<?, ?> map = mapper.readValue(content, Map.class);
return (String) map.get("id");
Expand All @@ -354,6 +398,11 @@ protected static <T> List<T> readList(String content,
}

/**
 * Resets server state between tests: removes all graph data
 * (edges and vertices) first, then all schema definitions.
 */
protected static void clearData() {
clearGraph();
clearSchema();
}

protected static void truncate() {
String token = "162f7848-0b6d-4faf-b557-3a0797869c55";
String message = "I'm sure to delete all data";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@

package com.baidu.hugegraph.core;

import org.apache.commons.configuration.ConfigurationException;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
Expand Down Expand Up @@ -53,7 +52,7 @@ public class CoreTestSuite {
private static HugeGraph graph = null;

@BeforeClass
public static void initEnv() throws ConfigurationException {
// Runs once before the suite: registers the available backend
// implementations via RegisterUtil.
public static void initEnv() {
RegisterUtil.registerBackends();
}

Expand All @@ -65,7 +64,7 @@ public static void init() {
}

@AfterClass
public static void clear() throws Exception {
public static void clear() {
if (graph == null) {
return;
}
Expand Down
21 changes: 21 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -392,6 +392,27 @@
</plugins>
</build>
</profile>
<profile>
<id>api-test</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20</version>
<executions>
<execution>
<id>api-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>test</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>tinkerpop-structure-test</id>
<build>
Expand Down

0 comments on commit 8652bd9

Please sign in to comment.