diff --git a/hugegraph-api/pom.xml b/hugegraph-api/pom.xml index 8ea074ee5e..d90316d514 100644 --- a/hugegraph-api/pom.xml +++ b/hugegraph-api/pom.xml @@ -86,7 +86,7 @@ - 0.37.0.0 + 0.38.0.0 diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedCrosspointsAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedCrosspointsAPI.java index 797e6ec9ff..76b33a385d 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedCrosspointsAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedCrosspointsAPI.java @@ -19,6 +19,11 @@ package com.baidu.hugegraph.api.traversers; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_CAPACITY; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_DEGREE; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; + import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -41,7 +46,6 @@ import com.baidu.hugegraph.HugeGraph; import com.baidu.hugegraph.api.API; -import com.baidu.hugegraph.api.filter.StatusFilter.Status; import com.baidu.hugegraph.backend.id.Id; import com.baidu.hugegraph.core.GraphManager; import com.baidu.hugegraph.schema.EdgeLabel; @@ -55,8 +59,6 @@ import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; -import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.*; - @Path("graphs/{graph}/traversers/customizedcrosspoints") @Singleton public class CustomizedCrosspointsAPI extends API { @@ -65,7 +67,6 @@ public class CustomizedCrosspointsAPI extends API { @POST @Timed - @Status(Status.CREATED) @Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON_WITH_CHARSET) public String post(@Context GraphManager manager, diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedPathsAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedPathsAPI.java index 462aa13fa1..8b9c58a838 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedPathsAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/CustomizedPathsAPI.java @@ -19,6 +19,13 @@ package com.baidu.hugegraph.api.traversers; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_CAPACITY; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_DEGREE; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_SAMPLE; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_WEIGHT; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; + import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -41,7 +48,6 @@ import com.baidu.hugegraph.HugeGraph; import com.baidu.hugegraph.api.API; -import com.baidu.hugegraph.api.filter.StatusFilter.Status; import com.baidu.hugegraph.backend.id.Id; import com.baidu.hugegraph.core.GraphManager; import com.baidu.hugegraph.schema.EdgeLabel; @@ -56,8 +62,6 @@ import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; -import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.*; - @Path("graphs/{graph}/traversers/customizedpaths") @Singleton 
public class CustomizedPathsAPI extends API { @@ -66,7 +70,6 @@ public class CustomizedPathsAPI extends API { @POST @Timed - @Status(Status.CREATED) @Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON_WITH_CHARSET) public String post(@Context GraphManager manager, diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/EdgesAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/EdgesAPI.java index 3ac78446d6..34ac0ffc70 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/EdgesAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/EdgesAPI.java @@ -93,7 +93,7 @@ public String shards(@Context GraphManager manager, HugeGraph g = graph(manager, graph); List shards = g.graphTransaction() .metadata(HugeType.EDGE_OUT, "splits", splitSize); - return manager.serializer(g).writeShards(shards); + return manager.serializer(g).writeList("shards", shards); } @GET diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KneighborAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KneighborAPI.java index b4e1d9fcb0..7c01217173 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KneighborAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KneighborAPI.java @@ -80,6 +80,6 @@ public String get(@Context GraphManager manager, HugeTraverser traverser = new HugeTraverser(g); Set ids = traverser.kneighbor(source, dir, edgeLabel, depth, degree, limit); - return manager.serializer(g).writeIds("vertices", ids); + return manager.serializer(g).writeList("vertices", ids); } } diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KoutAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KoutAPI.java index 4825637778..191fcbb164 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KoutAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/KoutAPI.java @@ -85,6 +85,6 @@ public String get(@Context GraphManager manager, HugeTraverser traverser = new HugeTraverser(g); Set ids = traverser.kout(sourceId, dir, edgeLabel, depth, nearest, degree, capacity, limit); - return manager.serializer(g).writeIds("vertices", ids); + return manager.serializer(g).writeList("vertices", ids); } } diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/NeighborRankAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/NeighborRankAPI.java new file mode 100644 index 0000000000..4b7b38e51b --- /dev/null +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/NeighborRankAPI.java @@ -0,0 +1,159 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.api.traversers; + +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_CAPACITY; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_DEGREE; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEPTH; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; +import static com.baidu.hugegraph.traversal.algorithm.NeighborRankTraverser.MAX_TOP; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.inject.Singleton; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Context; + +import org.slf4j.Logger; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.api.API; +import com.baidu.hugegraph.backend.id.Id; +import com.baidu.hugegraph.core.GraphManager; +import com.baidu.hugegraph.schema.EdgeLabel; +import com.baidu.hugegraph.server.RestServer; +import com.baidu.hugegraph.structure.HugeVertex; +import com.baidu.hugegraph.traversal.algorithm.NeighborRankTraverser; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.Log; +import com.codahale.metrics.annotation.Timed; +import com.fasterxml.jackson.annotation.JsonProperty; + +@Path("graphs/{graph}/traversers/neighborrank") +@Singleton +public class NeighborRankAPI extends API { + + private static final Logger LOG = Log.logger(RestServer.class); + + @POST + @Timed + @Produces(APPLICATION_JSON_WITH_CHARSET) + public String neighborRank(@Context GraphManager manager, + @PathParam("graph") String graph, + RankRequest request) { + E.checkArgumentNotNull(request, "The rank request body can't be null"); + E.checkArgumentNotNull(request.source, + "The source of rank request can't be null"); + E.checkArgument(request.steps != null && !request.steps.isEmpty(), + "The steps of rank request can't be empty"); + E.checkArgument(request.steps.size() <= Long.valueOf(DEFAULT_MAX_DEPTH), + "The steps length of rank request can't exceed %s", + DEFAULT_MAX_DEPTH); + E.checkArgument(request.alpha > 0 && request.alpha <= 1.0, + "The alpha of rank request must be in range (0, 1], " + + "but got '%s'", request.alpha); + + LOG.debug("Graph [{}] get neighbor rank from '{}' with steps '{}', " + + "alpha '{}' and capacity '{}'", graph, request.source, + request.steps, request.alpha, request.capacity); + + Id sourceId = HugeVertex.getIdValue(request.source); + HugeGraph g = graph(manager, graph); + + List steps = steps(g, request); + NeighborRankTraverser traverser; + traverser = new NeighborRankTraverser(g, request.alpha, + request.capacity); + List> ranks = traverser.neighborRank(sourceId, steps); + return manager.serializer(g).writeList("ranks", ranks); + } + + private static List steps(HugeGraph graph, + RankRequest req) { + List steps = new ArrayList<>(); + for (Step step : req.steps) { + steps.add(step.jsonToStep(graph)); + } + return steps; + } + + private static class RankRequest { + + @JsonProperty("source") + private String source; + @JsonProperty("steps") + private List steps; + @JsonProperty("alpha") + private double alpha; + @JsonProperty("capacity") + public long capacity = Long.valueOf(DEFAULT_CAPACITY); + + @Override + public String toString() { + return 
String.format("RankRequest{source=%s,steps=%s,alpha=%s," + + "capacity=%s}", this.source, this.steps, + this.alpha, this.capacity); + } + } + + private static class Step { + + @JsonProperty("direction") + public Directions direction; + @JsonProperty("labels") + public List labels; + @JsonProperty("degree") + public long degree = Long.valueOf(DEFAULT_DEGREE); + @JsonProperty("top") + public int top = Integer.valueOf(DEFAULT_PATHS_LIMIT); + + @Override + public String toString() { + return String.format("Step{direction=%s,labels=%s,degree=%s," + + "top=%s}", this.direction, this.labels, + this.degree, this.top); + } + + private NeighborRankTraverser.Step jsonToStep(HugeGraph graph) { + E.checkArgument(this.degree > 0 || this.degree == NO_LIMIT, + "The degree must be > 0, but got: %s", + this.degree); + E.checkArgument(this.top > 0 && this.top <= MAX_TOP, + "The top of each layer cannot exceed %s", MAX_TOP); + Map labelIds = new HashMap<>(); + if (this.labels != null) { + for (String label : this.labels) { + EdgeLabel el = graph.edgeLabel(label); + labelIds.put(el.id(), label); + } + } + return new NeighborRankTraverser.Step(this.direction, labelIds, + this.degree, this.top); + } + } +} diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/PersonalRankAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/PersonalRankAPI.java new file mode 100644 index 0000000000..8f6445ffaa --- /dev/null +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/PersonalRankAPI.java @@ -0,0 +1,151 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.api.traversers; + +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_DEGREE; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_LIMIT; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEPTH; +import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; + +import java.util.Map; + +import javax.inject.Singleton; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Context; + +import org.slf4j.Logger; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.api.API; +import com.baidu.hugegraph.backend.id.Id; +import com.baidu.hugegraph.core.GraphManager; +import com.baidu.hugegraph.server.RestServer; +import com.baidu.hugegraph.structure.HugeVertex; +import com.baidu.hugegraph.traversal.algorithm.PersonalRankTraverser; +import com.baidu.hugegraph.util.CollectionUtil; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.InsertionOrderUtil; +import com.baidu.hugegraph.util.Log; +import com.codahale.metrics.annotation.Timed; +import com.fasterxml.jackson.annotation.JsonProperty; + +@Path("graphs/{graph}/traversers/personalrank") +@Singleton +public class PersonalRankAPI extends API { + + private static final Logger LOG = Log.logger(RestServer.class); + + @POST + @Timed + @Produces(APPLICATION_JSON_WITH_CHARSET) + public String personalRank(@Context GraphManager manager, + @PathParam("graph") String graph, + RankRequest request) { + E.checkArgumentNotNull(request, "The rank request body can't be null"); + E.checkArgument(request.source != null, + "The source vertex id of rank request can't be null"); + E.checkArgument(request.label != null, + "The edge label of rank request can't be null"); + E.checkArgument(request.alpha > 0 && request.alpha <= 1.0, + "The alpha of rank request must be in range (0, 1], " + + "but got '%s'", request.alpha); + E.checkArgument(request.degree > 0 || request.degree == NO_LIMIT, + "The degree of rank request must be > 0, but got: %s", + request.degree); + E.checkArgument(request.limit > 0 || request.limit == NO_LIMIT, + "The limit of rank request must be > 0, but got: %s", + request.limit); + E.checkArgument(request.maxDepth > 0 && + request.maxDepth <= Long.valueOf(DEFAULT_MAX_DEPTH), + "The max depth of rank request must be " + + "in range (0, %s], but got '%s'", + DEFAULT_MAX_DEPTH, request.maxDepth); + + LOG.debug("Graph [{}] get personal rank from '{}' with " + + "edge label '{}', alpha '{}', degree '{}', " + + "max depth '{}' and sorted '{}'", + graph, request.source, request.label, request.alpha, + request.degree, request.maxDepth, request.sorted); + + Id sourceId = HugeVertex.getIdValue(request.source); + HugeGraph g = graph(manager, graph); + + PersonalRankTraverser traverser; + traverser = new PersonalRankTraverser(g, request.alpha, request.degree, + request.maxDepth); + Map ranks = traverser.personalRank(sourceId, request.label, + request.withLabel); + ranks = topN(ranks, request.sorted, request.limit); + return manager.serializer(g).writeMap(ranks); + } + + private static Map topN(Map ranks, + boolean sorted, long limit) { + if (sorted) { + ranks = CollectionUtil.sortByValue(ranks, false); + } + if (limit == NO_LIMIT) { + return ranks; + } + Map results = InsertionOrderUtil.newMap(); + long count = 0; + for (Map.Entry entry : ranks.entrySet()) { + results.put(entry.getKey(), entry.getValue()); + if (++count >= 
limit) { + break; + } + } + return results; + } + + private static class RankRequest { + + @JsonProperty("source") + private String source; + @JsonProperty("label") + private String label; + @JsonProperty("alpha") + private double alpha; + @JsonProperty("degree") + private long degree = Long.valueOf(DEFAULT_DEGREE); + @JsonProperty("limit") + private long limit = Long.valueOf(DEFAULT_LIMIT); + @JsonProperty("max_depth") + private int maxDepth; + @JsonProperty("with_label") + private PersonalRankTraverser.WithLabel withLabel = + PersonalRankTraverser.WithLabel.BOTH_LABEL; + @JsonProperty("sorted") + private boolean sorted = true; + + @Override + public String toString() { + return String.format("RankRequest{source=%s,label=%s,alpha=%s," + + "degree=%s,limit=%s,maxDepth=%s," + + "withLabel=%s,sorted=%s}", + this.source, this.label, this.alpha, + this.degree, this.limit, this.maxDepth, + this.withLabel, this.sorted); + } + } +} diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/ShortestPathAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/ShortestPathAPI.java index b606d15766..8ce94a5775 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/ShortestPathAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/ShortestPathAPI.java @@ -85,6 +85,6 @@ public String get(@Context GraphManager manager, List path = traverser.shortestPath(sourceId, targetId, dir, edgeLabel, depth, degree, skipDegree, capacity); - return manager.serializer(g).writeIds("path", path); + return manager.serializer(g).writeList("path", path); } } diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/VerticesAPI.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/VerticesAPI.java index 2c6aaf8812..df2aaacd63 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/VerticesAPI.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/api/traversers/VerticesAPI.java @@ -93,7 +93,7 @@ public String shards(@Context GraphManager manager, HugeGraph g = graph(manager, graph); List shards = g.graphTransaction() .metadata(HugeType.VERTEX, "splits", splitSize); - return manager.serializer(g).writeShards(shards); + return manager.serializer(g).writeList("shards", shards); } @GET diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/JsonSerializer.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/JsonSerializer.java index 03c88f8828..1be4062245 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/JsonSerializer.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/JsonSerializer.java @@ -61,7 +61,13 @@ public static JsonSerializer instance() { return INSTANCE; } - private String writeList(String label, List list) { + @Override + public String writeMap(Map map) { + return JsonUtil.toJson(map); + } + + @Override + public String writeList(String label, Collection list) { try (ByteArrayOutputStream out = new ByteArrayOutputStream(LBUF_SIZE)) { out.write(String.format("{\"%s\": ", label).getBytes(API.CHARSET)); out.write(JsonUtil.toJson(list).getBytes(API.CHARSET)); @@ -197,15 +203,6 @@ public String writeEdges(Iterator edges, boolean paging) { return this.writeIterator("edges", edges, paging); } - @Override - public String writeIds(String name, Collection ids) { - if (ids instanceof List) { - return this.writeList(name, (List) ids); - } else { - return this.writeList(name, new ArrayList<>(ids)); - } - } - @Override public 
String writePaths(String name, Collection paths, boolean withCrossPoint, @@ -243,9 +240,4 @@ public String writeCrosspoints(CrosspointsPaths paths, "vertices", iterator); return JsonUtil.toJson(results); } - - @Override - public String writeShards(List shards) { - return this.writeList("shards", shards); - } } diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/Serializer.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/Serializer.java index 9e1e5198a6..7e51a9efd5 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/Serializer.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/serializer/Serializer.java @@ -22,6 +22,7 @@ import java.util.Collection; import java.util.Iterator; import java.util.List; +import java.util.Map; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Vertex; @@ -37,6 +38,10 @@ public interface Serializer { + public String writeMap(Map map); + + public String writeList(String label, Collection list); + public String writePropertyKey(PropertyKey propertyKey); public String writePropertyKeys(List propertyKeys); @@ -63,8 +68,6 @@ public interface Serializer { public String writeEdges(Iterator edges, boolean paging); - public String writeIds(String name, Collection ids); - public String writePaths(String name, Collection paths, boolean withCrossPoint, Iterator vertices); @@ -76,6 +79,4 @@ public default String writePaths(String name, public String writeCrosspoints(CrosspointsPaths paths, Iterator iterator, boolean withPath); - - public String writeShards(List shards); } diff --git a/hugegraph-api/src/main/java/com/baidu/hugegraph/version/ApiVersion.java b/hugegraph-api/src/main/java/com/baidu/hugegraph/version/ApiVersion.java index ed51000a36..37fd336ea1 100644 --- a/hugegraph-api/src/main/java/com/baidu/hugegraph/version/ApiVersion.java +++ b/hugegraph-api/src/main/java/com/baidu/hugegraph/version/ApiVersion.java @@ -83,10 +83,11 @@ public final class ApiVersion { * [0.35] Issue-287: Support pagination when do index query * [0.36] Issue-360: Support paging for scan api * [0.37] Issue-391: Add skip_super_node for shortest path + * [0.38] Issue-274: Add personalrank and neighborrank RESTful API */ // The second parameter of Version.of() is for IDE running without JAR - public static final Version VERSION = Version.of(ApiVersion.class, "0.37"); + public static final Version VERSION = Version.of(ApiVersion.class, "0.38"); public static final void check() { // Check version of hugegraph-core. 
Firstly do check from version 0.3 diff --git a/hugegraph-core/pom.xml b/hugegraph-core/pom.xml index d2e3dc69aa..b192b4819b 100644 --- a/hugegraph-core/pom.xml +++ b/hugegraph-core/pom.xml @@ -19,7 +19,7 @@ com.baidu.hugegraph hugegraph-common - 1.5.8 + 1.6.0 diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdPrefixQuery.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdPrefixQuery.java index a6e7ca3101..3a0119a649 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdPrefixQuery.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdPrefixQuery.java @@ -55,6 +55,7 @@ public IdPrefixQuery(HugeType resultType, Query originQuery, this.start = start; this.inclusiveStart = inclusive; this.prefix = prefix; + this.copyBasic(originQuery); } public Id start() { diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdRangeQuery.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdRangeQuery.java index 7547254ea8..4d1bc2660c 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdRangeQuery.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/IdRangeQuery.java @@ -57,6 +57,7 @@ public IdRangeQuery(HugeType resultType, Query originQuery, this.end = end; this.inclusiveStart = inclusiveStart; this.inclusiveEnd = inclusiveEnd; + this.copyBasic(originQuery); } public Id start() { diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/Query.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/Query.java index bdd35bc6c8..15dc48aa9d 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/Query.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/query/Query.java @@ -74,6 +74,15 @@ public Query(HugeType resultType, Query originQuery) { this.showDeleting = false; } + public void copyBasic(Query query) { + this.offset = query.offset(); + this.limit = query.limit(); + this.page = query.page(); + this.capacity = query.capacity(); + this.showHidden = query.showHidden(); + this.showDeleting = query.showDeleting(); + } + public HugeType resultType() { return this.resultType; } diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/serializer/BinarySerializer.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/serializer/BinarySerializer.java index be4a27fd1f..95f3362128 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/serializer/BinarySerializer.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/serializer/BinarySerializer.java @@ -661,9 +661,6 @@ private Query writeStringIndexQuery(ConditionQuery query) { } else { newQuery = new IdPrefixQuery(query, prefix); } - newQuery.page(query.page()); - newQuery.limit(query.limit()); - newQuery.offset(query.offset()); return newQuery; } @@ -707,11 +704,7 @@ private Query writeRangeIndexQuery(ConditionQuery query) { HugeType type = query.resultType(); if (keyEq != null) { Id id = formatIndexId(type, index, keyEq); - Query newQuery = new IdPrefixQuery(query, id); - newQuery.page(query.page()); - newQuery.limit(query.limit()); - newQuery.offset(query.offset()); - return newQuery; + return new IdPrefixQuery(query, id); } if (keyMin == null) { @@ -754,9 +747,6 @@ private Query writeRangeIndexQuery(ConditionQuery query) { } newQuery = new IdRangeQuery(query, start, keyMinEq, max, keyMaxEq); } - newQuery.page(query.page()); - newQuery.limit(query.limit()); - 
newQuery.offset(query.offset()); return newQuery; } diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/HugeTraverser.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/HugeTraverser.java index 059b715942..279064fcaa 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/HugeTraverser.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/HugeTraverser.java @@ -44,6 +44,7 @@ import com.baidu.hugegraph.backend.query.Query; import com.baidu.hugegraph.backend.tx.GraphTransaction; import com.baidu.hugegraph.iterator.ExtendableIterator; +import com.baidu.hugegraph.iterator.MapperIterator; import com.baidu.hugegraph.schema.SchemaLabel; import com.baidu.hugegraph.structure.HugeEdge; import com.baidu.hugegraph.type.HugeType; @@ -62,8 +63,10 @@ public class HugeTraverser { public static final String DEFAULT_CAPACITY = "10000000"; public static final String DEFAULT_ELEMENTS_LIMIT = "10000000"; public static final String DEFAULT_PATHS_LIMIT = "10"; + public static final String DEFAULT_LIMIT = "100"; public static final String DEFAULT_DEGREE = "10000"; public static final String DEFAULT_SAMPLE = "100"; + public static final String DEFAULT_MAX_DEPTH = "50"; public static final String DEFAULT_WEIGHT = "0"; // Empirical value of scan limit, with which results can be returned in 3s @@ -191,6 +194,15 @@ private Set adjacentVertices(Set vertices, Directions dir, return neighbors; } + protected Iterator adjacentVertices(Id source, Directions dir, + Id label, long limit) { + Iterator edges = this.edgesOfVertex(source, dir, label, limit); + return new MapperIterator<>(edges, e -> { + HugeEdge edge = (HugeEdge) e; + return edge.id().otherVertexId(); + }); + } + protected Iterator edgesOfVertex(Id source, Directions dir, Id label, long limit) { Id[] labels = {}; @@ -213,6 +225,7 @@ protected Iterator edgesOfVertex(Id source, Directions dir, ExtendableIterator results = new ExtendableIterator<>(); for (Id label : labels) { E.checkNotNull(label, "edge label"); + // TODO: limit should be applied to all labels results.extend(this.edgesOfVertex(source, dir, label, limit)); } return results; diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/NeighborRankTraverser.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/NeighborRankTraverser.java new file mode 100644 index 0000000000..5bebb02287 --- /dev/null +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/NeighborRankTraverser.java @@ -0,0 +1,303 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.traversal.algorithm; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import javax.ws.rs.core.MultivaluedMap; + +import org.apache.tinkerpop.gremlin.structure.Edge; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.id.Id; +import com.baidu.hugegraph.structure.HugeEdge; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.OrderLimitMap; + +public class NeighborRankTraverser extends HugeTraverser { + + public static final int MAX_TOP = 1000; + public static final int DEFAULT_CAPACITY_PER_LAYER = 100000; + + private final double alpha; + private final long capacity; + + public NeighborRankTraverser(HugeGraph graph, double alpha, long capacity) { + super(graph); + checkCapacity(capacity); + this.alpha = alpha; + this.capacity = capacity; + } + + public List> neighborRank(Id source, List steps) { + E.checkArgumentNotNull(source, "The source vertex id can't be null"); + E.checkArgument(!steps.isEmpty(), "The steps can't be empty"); + + MultivaluedMap sources = newMultivalueMap(); + sources.add(source, new Node(source, null)); + + boolean sameLayerTransfer = true; + long access = 0; + // Results: ranks of each layer + List ranks = new ArrayList<>(); + ranks.add(Ranks.of(source, 1.0)); + + for (Step step : steps) { + Ranks lastLayerRanks = ranks.get(ranks.size() - 1); + Map sameLayerIncrRanks = new HashMap<>(); + List adjacencies = new ArrayList<>(); + MultivaluedMap newVertices = newMultivalueMap(); + // Traversal vertices of previous level + for (Map.Entry> entry : sources.entrySet()) { + Id vertex = entry.getKey(); + Iterator edges = edgesOfVertex(vertex, step.direction, + step.labels, null, + step.degree); + + Adjacencies adjacenciesV = new Adjacencies(vertex); + Set sameLayerNodesV = new HashSet<>(); + Map> prevLayerNodesV = new HashMap<>(); + while (edges.hasNext()) { + HugeEdge edge = (HugeEdge) edges.next(); + Id target = edge.id().otherVertexId(); + // Determine whether it belongs to the same layer + if (this.belongToSameLayer(sources.keySet(), target, + sameLayerNodesV)) { + continue; + } + /* + * Determine whether it belongs to the previous layers, + * if it belongs to, update the weight, but don't pass + * any more + */ + if (this.belongToPrevLayers(ranks, target, + prevLayerNodesV)) { + continue; + } + + for (Node n : entry.getValue()) { + // If have loop, skip target + if (n.contains(target)) { + continue; + } + Node newNode = new Node(target, n); + adjacenciesV.add(newNode); + // Add adjacent nodes to sources of next step + newVertices.add(target, newNode); + + checkCapacity(this.capacity, ++access, "neighbor rank"); + } + } + long degree = sameLayerNodesV.size() + prevLayerNodesV.size() + + adjacenciesV.nodes().size(); + if (degree == 0L) { + continue; + } + adjacenciesV.degree(degree); + adjacencies.add(adjacenciesV); + + double incr = lastLayerRanks.getOrDefault(vertex, 0.0) * + this.alpha / degree; + // Merge the increment of the same layer node + this.mergeSameLayerIncrRanks(sameLayerNodesV, incr, + sameLayerIncrRanks); + // Adding contributions to the previous layers + this.contributePrevLayers(ranks, incr, prevLayerNodesV); + } + + Ranks newLayerRanks; + if (sameLayerTransfer) { + // First contribute to last layer, then pass to the new layer + this.contributeLastLayer(sameLayerIncrRanks, lastLayerRanks); + 
newLayerRanks = this.contributeNewLayer(adjacencies, + lastLayerRanks, + step.capacity); + } else { + // First pass to the new layer, then contribute to last layer + newLayerRanks = this.contributeNewLayer(adjacencies, + lastLayerRanks, + step.capacity); + this.contributeLastLayer(sameLayerIncrRanks, lastLayerRanks); + } + ranks.add(newLayerRanks); + + // Re-init sources + sources = newVertices; + } + return this.topRanks(ranks, steps); + } + + private boolean belongToSameLayer(Set sources, Id target, + Set sameLayerNodes) { + if (sources.contains(target)) { + sameLayerNodes.add(target); + return true; + } else { + return false; + } + } + + private boolean belongToPrevLayers(List ranks, Id target, + Map> prevLayerNodes) { + for (int i = ranks.size() - 2; i > 0; i--) { + Ranks prevLayerRanks = ranks.get(i); + if (prevLayerRanks.containsKey(target)) { + Set nodes = prevLayerNodes.computeIfAbsent(i, HashSet::new); + nodes.add(target); + return true; + } + } + return false; + } + + private void mergeSameLayerIncrRanks(Set sameLayerNodesV, double incr, + Map sameLayerIncrRanks) { + for (Id node : sameLayerNodesV) { + double oldRank = sameLayerIncrRanks.getOrDefault(node, 0.0); + sameLayerIncrRanks.put(node, oldRank + incr); + } + } + + private void contributePrevLayers(List ranks, double incr, + Map> prevLayerNodesV) { + for (Map.Entry> e : prevLayerNodesV.entrySet()) { + Ranks prevLayerRanks = ranks.get(e.getKey()); + for (Id node : e.getValue()) { + double oldRank = prevLayerRanks.get(node); + prevLayerRanks.put(node, oldRank + incr); + } + } + } + + private void contributeLastLayer(Map rankIncrs, + Ranks lastLayerRanks) { + for (Map.Entry entry : rankIncrs.entrySet()) { + double originRank = lastLayerRanks.get(entry.getKey()); + double incrRank = entry.getValue(); + lastLayerRanks.put(entry.getKey(), originRank + incrRank); + } + } + + private Ranks contributeNewLayer(List adjacencies, + Ranks lastLayerRanks, int capacity) { + Ranks newLayerRanks = new Ranks(capacity); + for (Adjacencies adjacenciesV : adjacencies) { + Id source = adjacenciesV.source(); + long degree = adjacenciesV.degree(); + for (Node node : adjacenciesV.nodes()) { + double rank = newLayerRanks.getOrDefault(node.id(), 0.0); + rank += (lastLayerRanks.get(source) * this.alpha / degree); + newLayerRanks.put(node.id(), rank); + } + } + return newLayerRanks; + } + + private List> topRanks(List ranks, + List steps) { + assert ranks.size() > 0; + List> results = new ArrayList<>(ranks.size()); + // The first layer is root node so skip i=0 + results.add(ranks.get(0)); + for (int i = 1; i < ranks.size(); i++) { + Step step = steps.get(i - 1); + Ranks origin = ranks.get(i); + if (origin.size() > step.top) { + results.add(origin.topN(step.top)); + } else { + results.add(origin); + } + } + return results; + } + + public static class Step { + + private final Directions direction; + private final Map labels; + private final long degree; + private final int top; + private final int capacity; + + public Step(Directions direction, Map labels, + long degree, int top) { + this.direction = direction; + this.labels = labels; + this.degree = degree; + this.top = top; + this.capacity = DEFAULT_CAPACITY_PER_LAYER; + } + } + + private static class Adjacencies { + + private final Id source; + private final List nodes; + private long degree; + + public Adjacencies(Id source) { + this.source = source; + this.nodes = new ArrayList<>(); + this.degree = -1L; + } + + public Id source() { + return this.source; + } + + public List nodes() { + return 
this.nodes; + } + + public void add(Node node) { + this.nodes.add(node); + } + + public long degree() { + E.checkArgument(degree > 0, + "The degree must be > 0, but got %s", degree); + return this.degree; + } + + public void degree(long degree) { + this.degree = degree; + } + } + + private static class Ranks extends OrderLimitMap { + + public Ranks(int capacity) { + super(capacity); + } + + public static Ranks of(Id key, Double value) { + Ranks ranks = new Ranks(1); + ranks.put(key, value); + return ranks; + } + } +} diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/PersonalRankTraverser.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/PersonalRankTraverser.java new file mode 100644 index 0000000000..ab96e984af --- /dev/null +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/traversal/algorithm/PersonalRankTraverser.java @@ -0,0 +1,178 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.traversal.algorithm; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.id.Id; +import com.baidu.hugegraph.schema.EdgeLabel; +import com.baidu.hugegraph.schema.VertexLabel; +import com.baidu.hugegraph.structure.HugeVertex; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.util.E; + +public class PersonalRankTraverser extends HugeTraverser { + + private final double alpha; + private final long degree; + private final int maxDepth; + + public PersonalRankTraverser(HugeGraph graph, double alpha, + long degree, int maxDepth) { + super(graph); + this.alpha = alpha; + this.degree = degree; + this.maxDepth = maxDepth; + } + + public Map personalRank(Id source, String label, + WithLabel withLabel) { + E.checkArgumentNotNull(source, "The source vertex id can't be null"); + E.checkArgumentNotNull(label, "The edge label can't be null"); + + Map ranks = new HashMap<>(); + ranks.put(source, 1.0); + + Id labelId = this.graph().edgeLabel(label).id(); + Directions dir = this.getStartDirection(source, label); + + Set outSeeds = new HashSet<>(); + Set inSeeds = new HashSet<>(); + if (dir == Directions.OUT) { + outSeeds.add(source); + } else { + inSeeds.add(source); + } + + Set rootAdjacencies = new HashSet<>(); + for (long i = 0; i < this.maxDepth; i++) { + Map newRanks = this.calcNewRanks(outSeeds, inSeeds, + labelId, ranks); + ranks = this.compensateRoot(source, newRanks); + if (i == 0) { + rootAdjacencies.addAll(ranks.keySet()); + 
} + } + // Remove directly connected neighbors + removeAll(ranks, rootAdjacencies); + // Remove unnecessary label + if (withLabel == WithLabel.SAME_LABEL) { + removeAll(ranks, dir == Directions.OUT ? inSeeds : outSeeds); + } else if (withLabel == WithLabel.OTHER_LABEL) { + removeAll(ranks, dir == Directions.OUT ? outSeeds : inSeeds); + } + return ranks; + } + + private Map calcNewRanks(Set outSeeds, Set inSeeds, + Id label, Map ranks) { + Map newRanks = new HashMap<>(); + BiFunction, Directions, Set> neighborIncrRanks; + neighborIncrRanks = (seeds, dir) -> { + Set tmpSeeds = new HashSet<>(); + for (Id seed : seeds) { + Double oldRank = ranks.get(seed); + E.checkState(oldRank != null, "Expect rank of seed exists"); + + Iterator iter = this.adjacentVertices(seed, dir, label, + this.degree); + List neighbors = IteratorUtils.list(iter); + + long degree = neighbors.size(); + if (degree == 0L) { + newRanks.put(seed, oldRank); + continue; + } + double incrRank = oldRank * alpha / degree; + + // Collect all neighbors increment + for (Id neighbor : neighbors) { + tmpSeeds.add(neighbor); + // Assign an initial value when firstly update neighbor rank + double rank = newRanks.getOrDefault(neighbor, 0.0); + newRanks.put(neighbor, rank + incrRank); + } + } + return tmpSeeds; + }; + + Set tmpInSeeds = neighborIncrRanks.apply(outSeeds, Directions.OUT); + Set tmpOutSeeds = neighborIncrRanks.apply(inSeeds, Directions.IN); + + outSeeds.addAll(tmpOutSeeds); + inSeeds.addAll(tmpInSeeds); + return newRanks; + } + + private Map compensateRoot(Id root, Map newRanks) { + double rank = newRanks.getOrDefault(root, 0.0); + rank += (1 - this.alpha); + newRanks.put(root, rank); + return newRanks; + } + + private Directions getStartDirection(Id source, String label) { + // NOTE: The outer layer needs to ensure that the vertex Id is valid + HugeVertex vertex = (HugeVertex) graph().vertices(source).next(); + VertexLabel vertexLabel = vertex.schemaLabel(); + EdgeLabel edgeLabel = this.graph().edgeLabel(label); + Id sourceLabel = edgeLabel.sourceLabel(); + Id targetLabel = edgeLabel.targetLabel(); + + E.checkArgument(edgeLabel.linkWithLabel(vertexLabel.id()), + "The vertex '%s' doesn't link with edge label '%s'", + source, label); + E.checkArgument(!sourceLabel.equals(targetLabel), + "The edge label for personal rank must " + + "link different vertex labels"); + if (sourceLabel.equals(vertexLabel.id())) { + return Directions.OUT; + } else { + assert targetLabel.equals(vertexLabel.id()); + return Directions.IN; + } + } + + private long degreeOfVertex(Id source, Directions dir, Id label) { + return IteratorUtils.count(this.edgesOfVertex(source, dir, label, + this.degree)); + } + + private static void removeAll(Map map, Set keys) { + for (Id key : keys) { + map.remove(key); + } + } + + public enum WithLabel { + SAME_LABEL, + OTHER_LABEL, + BOTH_LABEL + } +} diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/version/CoreVersion.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/version/CoreVersion.java index 6548edd6f0..12e2707f82 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/version/CoreVersion.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/version/CoreVersion.java @@ -39,7 +39,7 @@ public class CoreVersion { public static void check() { // Check version of hugegraph-common - VersionUtil.check(CommonVersion.VERSION, "1.5.0", "1.6", + VersionUtil.check(CommonVersion.VERSION, "1.6.0", "1.7", CommonVersion.NAME); } } diff --git 
a/hugegraph-example/src/main/java/com/baidu/hugegraph/example/Example3.java b/hugegraph-example/src/main/java/com/baidu/hugegraph/example/Example3.java new file mode 100644 index 0000000000..caa212228c --- /dev/null +++ b/hugegraph-example/src/main/java/com/baidu/hugegraph/example/Example3.java @@ -0,0 +1,173 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.example; + +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.schema.SchemaManager; +import com.baidu.hugegraph.util.Log; + +public class Example3 { + + private static final Logger LOG = Log.logger(Example3.class); + + public static void main(String[] args) throws InterruptedException { + LOG.info("Example3 start!"); + + HugeGraph graph = ExampleUtil.loadGraph(); + + Example3.loadNeighborRankData(graph); + Example3.loadPersonalRankData(graph); + + graph.close(); + + HugeGraph.shutdown(30L); + } + + public static void loadNeighborRankData(final HugeGraph graph) { + SchemaManager schema = graph.schema(); + + schema.propertyKey("name").asText().ifNotExist().create(); + + schema.vertexLabel("person") + .properties("name") + .useCustomizeStringId() + .ifNotExist() + .create(); + + schema.vertexLabel("movie") + .properties("name") + .useCustomizeStringId() + .ifNotExist() + .create(); + + schema.edgeLabel("follow") + .sourceLabel("person") + .targetLabel("person") + .ifNotExist() + .create(); + + schema.edgeLabel("like") + .sourceLabel("person") + .targetLabel("movie") + .ifNotExist() + .create(); + + schema.edgeLabel("directedBy") + .sourceLabel("movie") + .targetLabel("person") + .ifNotExist() + .create(); + + graph.tx().open(); + + Vertex O = graph.addVertex(T.label, "person", T.id, "O", "name", "O"); + + Vertex A = graph.addVertex(T.label, "person", T.id, "A", "name", "A"); + Vertex B = graph.addVertex(T.label, "person", T.id, "B", "name", "B"); + Vertex C = graph.addVertex(T.label, "person", T.id, "C", "name", "C"); + Vertex D = graph.addVertex(T.label, "person", T.id, "D", "name", "D"); + + Vertex E = graph.addVertex(T.label, "movie", T.id, "E", "name", "E"); + Vertex F = graph.addVertex(T.label, "movie", T.id, "F", "name", "F"); + Vertex G = graph.addVertex(T.label, "movie", T.id, "G", "name", "G"); + Vertex H = graph.addVertex(T.label, "movie", T.id, "H", "name", "H"); + Vertex I = graph.addVertex(T.label, "movie", T.id, "I", "name", "I"); + Vertex J = graph.addVertex(T.label, "movie", T.id, "J", "name", "J"); + + Vertex K = graph.addVertex(T.label, "person", T.id, "K", "name", "K"); + Vertex L = graph.addVertex(T.label, "person", T.id, "L", "name", "L"); + Vertex M 
= graph.addVertex(T.label, "person", T.id, "M", "name", "M"); + + O.addEdge("follow", A); + O.addEdge("follow", B); + O.addEdge("follow", C); + D.addEdge("follow", O); + + A.addEdge("follow", B); + A.addEdge("like", E); + A.addEdge("like", F); + + B.addEdge("like", G); + B.addEdge("like", H); + + C.addEdge("like", I); + C.addEdge("like", J); + + E.addEdge("directedBy", K); + F.addEdge("directedBy", B); + F.addEdge("directedBy", L); + + G.addEdge("directedBy", M); + + graph.tx().commit(); + } + + public static void loadPersonalRankData(final HugeGraph graph) { + SchemaManager schema = graph.schema(); + + schema.propertyKey("name").asText().ifNotExist().create(); + + schema.vertexLabel("person") + .properties("name") + .useCustomizeStringId() + .ifNotExist() + .create(); + + schema.vertexLabel("movie") + .properties("name") + .useCustomizeStringId() + .ifNotExist() + .create(); + + schema.edgeLabel("like") + .sourceLabel("person") + .targetLabel("movie") + .ifNotExist() + .create(); + + graph.tx().open(); + + Vertex A = graph.addVertex(T.label, "person", T.id, "A", "name", "A"); + Vertex B = graph.addVertex(T.label, "person", T.id, "B", "name", "B"); + Vertex C = graph.addVertex(T.label, "person", T.id, "C", "name", "C"); + + Vertex a = graph.addVertex(T.label, "movie", T.id, "a", "name", "a"); + Vertex b = graph.addVertex(T.label, "movie", T.id, "b", "name", "b"); + Vertex c = graph.addVertex(T.label, "movie", T.id, "c", "name", "c"); + Vertex d = graph.addVertex(T.label, "movie", T.id, "d", "name", "d"); + + A.addEdge("like", a); + A.addEdge("like", c); + + B.addEdge("like", a); + B.addEdge("like", b); + B.addEdge("like", c); + B.addEdge("like", d); + + C.addEdge("like", c); + C.addEdge("like", d); + + graph.tx().commit(); + } +}
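
For reference, the two traversers introduced by this change (exposed over REST as POST graphs/{graph}/traversers/personalrank and graphs/{graph}/traversers/neighborrank) can also be driven directly from the example module. The sketch below is not part of this change set: the class name RankSketch and all parameter values (alpha 0.85, degree/capacity limits, top 10, 5 iterations) are illustrative only, and it assumes ExampleUtil.loadGraph() together with the sample data loaded by Example3 above.

package com.baidu.hugegraph.example;

import java.util.Collections;
import java.util.List;
import java.util.Map;

import com.baidu.hugegraph.HugeGraph;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.structure.HugeVertex;
import com.baidu.hugegraph.traversal.algorithm.NeighborRankTraverser;
import com.baidu.hugegraph.traversal.algorithm.PersonalRankTraverser;
import com.baidu.hugegraph.type.define.Directions;

public class RankSketch {

    public static void main(String[] args) {
        HugeGraph graph = ExampleUtil.loadGraph();
        Example3.loadNeighborRankData(graph);
        Example3.loadPersonalRankData(graph);

        // PersonalRank: rank "movie" vertices for person "A" over "like"
        // edges; alpha 0.85, per-vertex degree limit 1000, 5 iterations
        PersonalRankTraverser personal =
                new PersonalRankTraverser(graph, 0.85, 1000L, 5);
        Map<Id, Double> ranks = personal.personalRank(
                HugeVertex.getIdValue("A"), "like",
                PersonalRankTraverser.WithLabel.OTHER_LABEL);
        System.out.println("personal rank: " + ranks);

        // NeighborRank: spread rank from "O" along one OUT step over all
        // edge labels (empty label map), keeping the top 10 per layer
        NeighborRankTraverser.Step step = new NeighborRankTraverser.Step(
                Directions.OUT, Collections.emptyMap(), 10000L, 10);
        NeighborRankTraverser neighbor =
                new NeighborRankTraverser(graph, 0.85, 10000000L);
        List<Map<Id, Double>> layers = neighbor.neighborRank(
                HugeVertex.getIdValue("O"), Collections.singletonList(step));
        System.out.println("neighbor rank layers: " + layers);

        graph.close();
        HugeGraph.shutdown(30L);
    }
}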