Revert "Support paging for scan api (#361)"
This reverts commit 60d8d8f.
javeme authored Apr 2, 2019
1 parent 60d8d8f commit 007bb95
Showing 10 changed files with 17 additions and 108 deletions.
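
After this revert, the edge and vertex scan APIs accept only the start and end query parameters again: the page parameter is removed, the API layer no longer calls query.limit(Query.DEFAULT_CAPACITY) or query.page(page), and results are serialized without a "page" field. Below is a minimal, hypothetical client sketch of a scan call against the reverted API; the exact REST path is not shown in this diff, so the URL is only a placeholder, and the start/end values mirror the ones used in the core tests rather than positions obtained from the shards API.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class EdgeScanExample {

    public static void main(String[] args) throws Exception {
        // Hypothetical host and path; in practice the start/end positions would
        // come from the shards API rather than Long.MIN_VALUE/MAX_VALUE.
        String url = "http://127.0.0.1:8080/apis/graphs/hugegraph/graph/edges/scan" +
                     "?start=" + Long.MIN_VALUE + "&end=" + Long.MAX_VALUE;

        // After the revert the handler reads only 'start' and 'end'; a 'page'
        // query parameter is no longer part of the scan API.
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // The body holds the serialized edges; with paging reverted there is no
        // trailing "page" token to pass to a follow-up request.
        System.out.println(response.body());
    }
}
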
2 changes: 1 addition & 1 deletion hugegraph-api/pom.xml
@@ -86,7 +86,7 @@
</addDefaultSpecificationEntries>
</manifest>
<manifestEntries>
<Implementation-Version>0.36.0.0</Implementation-Version>
<Implementation-Version>0.35.0.0</Implementation-Version>
</manifestEntries>
</archive>
</configuration>
@@ -38,7 +38,6 @@
import com.baidu.hugegraph.api.filter.CompressInterceptor.Compress;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.ConditionQuery;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.core.GraphManager;
import com.baidu.hugegraph.server.RestServer;
@@ -102,19 +101,16 @@ public String shards(@Context GraphManager manager,
public String scan(@Context GraphManager manager,
@PathParam("graph") String graph,
@QueryParam("start") String start,
@QueryParam("end") String end,
@QueryParam("page") String page) {
LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}, " +
"page: {}) ", graph, start, end, page);
@QueryParam("end") String end) {
LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}) ",
graph, start, end);

HugeGraph g = graph(manager, graph);

ConditionQuery query = new ConditionQuery(HugeType.EDGE_OUT);
query.scan(start, end);
query.limit(Query.DEFAULT_CAPACITY);
query.page(page);
Iterator<Edge> edges = g.edges(query);

return manager.serializer(g).writeEdges(edges, true);
return manager.serializer(g).writeEdges(edges, false);
}
}
@@ -39,7 +39,6 @@
import com.baidu.hugegraph.api.graph.VertexAPI;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.ConditionQuery;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.core.GraphManager;
import com.baidu.hugegraph.server.RestServer;
@@ -102,19 +101,16 @@ public String shards(@Context GraphManager manager,
public String scan(@Context GraphManager manager,
@PathParam("graph") String graph,
@QueryParam("start") String start,
@QueryParam("end") String end,
@QueryParam("page") String page) {
LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}, " +
"page: {}) ", graph, start, end, page);
@QueryParam("end") String end) {
LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}) ",
graph, start, end);

HugeGraph g = graph(manager, graph);

ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
query.scan(start, end);
query.limit(Query.DEFAULT_CAPACITY);
query.page(page);
Iterator<Vertex> vertices = g.vertices(query);

return manager.serializer(g).writeVertices(vertices, true);
return manager.serializer(g).writeVertices(vertices, false);
}
}
@@ -36,7 +36,6 @@
import com.baidu.hugegraph.api.API;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.iterator.Metadatable;
import com.baidu.hugegraph.schema.EdgeLabel;
import com.baidu.hugegraph.schema.IndexLabel;
import com.baidu.hugegraph.schema.PropertyKey;
@@ -101,16 +100,7 @@ private String writeIterator(String label, Iterator<?> itor,

// Write page
if (paging) {
String page;
if (itor instanceof GraphTraversal<?, ?>) {
page = TraversalUtil.page((GraphTraversal<?, ?>) itor);
} else if (itor instanceof Metadatable) {
page = (String) ((Metadatable) itor).metadata("page");
} else {
throw new HugeException(
"Error type '%s' of paging iterator '%s'",
itor.getClass(), itor);
}
String page = TraversalUtil.page((GraphTraversal<?, ?>) itor);
if (page != null) {
page = String.format(",\"page\": \"%s\"", page);
} else {
@@ -75,17 +75,14 @@ public final class ApiVersion {
* [0.29] Issue-39: Add rays and rings RESTful API
* [0.30] Issue-32: Change index create API to return indexLabel and task id
* [0.31] Issue-182: Support restore graph in restoring and merging mode
*
* version 0.9:
* [0.32] Issue-250: Keep depth and degree consistent for traverser api
* [0.33] Issue-305: Implement customized paths and crosspoints RESTful API
* [0.34] Issue-307: Let VertexAPI use simplified property serializer
* [0.35] Issue-287: Support pagination when do index query
* [0.36] Issue-360: Support paging for scan api
*/

// The second parameter of Version.of() is for IDE running without JAR
public static final Version VERSION = Version.of(ApiVersion.class, "0.36");
public static final Version VERSION = Version.of(ApiVersion.class, "0.35");

public static final void check() {
// Check version of hugegraph-core. Firstly do check from version 0.3
@@ -105,8 +105,8 @@ private Iterator<HugeVertex> queryVerticesByIds(IdQuery query) {

@Override
protected Iterator<HugeEdge> queryEdgesFromBackend(Query query) {
if (query.empty() || query.paging()) {
// Query all edges or query edges in paging, don't cache it
if (query.empty()) {
// Query all edges, don't cache it
return super.queryEdgesFromBackend(query);
}

@@ -203,21 +203,14 @@ protected RowIterator queryByCond(Session session, ConditionQuery query) {
"Invalid scan with multi conditions: %s", query);
Relation scan = query.relations().iterator().next();
Shard shard = (Shard) scan.value();
return this.queryByRange(session, shard, query.page());
return this.queryByRange(session, shard);
}
throw new NotSupportException("query: %s", query);
}

protected RowIterator queryByRange(Session session, Shard shard,
String page) {
protected RowIterator queryByRange(Session session, Shard shard) {
byte[] start = this.shardSpliter.position(shard.start());
byte[] end = this.shardSpliter.position(shard.end());
if (page != null && !page.isEmpty()) {
byte[] position = PageState.fromString(page).position();
E.checkArgument(Bytes.compare(position, start) >= 0,
"Invalid page out of lower bound");
start = position;
}
return session.scan(this.table(), start, end);
}

@@ -42,7 +42,6 @@
import com.baidu.hugegraph.exception.NotSupportException;
import com.baidu.hugegraph.iterator.ExtendableIterator;
import com.baidu.hugegraph.type.HugeType;
import com.baidu.hugegraph.util.Bytes;
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.Log;
import com.google.common.collect.ImmutableList;
@@ -194,21 +193,14 @@ protected BackendColumnIterator queryByCond(Session session,
"Invalid scan with multi conditions: %s", query);
Relation scan = query.relations().iterator().next();
Shard shard = (Shard) scan.value();
return this.queryByRange(session, shard, query.page());
return this.queryByRange(session, shard);
}
throw new NotSupportException("query: %s", query);
}

protected BackendColumnIterator queryByRange(Session session, Shard shard,
String page) {
protected BackendColumnIterator queryByRange(Session session, Shard shard) {
byte[] start = this.shardSpliter.position(shard.start());
byte[] end = this.shardSpliter.position(shard.end());
if (page != null && !page.isEmpty()) {
byte[] position = PageState.fromString(page).position();
E.checkArgument(Bytes.compare(position, start) >= 0,
"Invalid page out of lower bound");
start = position;
}
return session.scan(this.table(), start, end);
}

@@ -22,7 +22,6 @@
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
@@ -52,7 +51,6 @@
import com.baidu.hugegraph.config.CoreOptions;
import com.baidu.hugegraph.exception.LimitExceedException;
import com.baidu.hugegraph.exception.NotFoundException;
import com.baidu.hugegraph.iterator.Metadatable;
import com.baidu.hugegraph.schema.SchemaManager;
import com.baidu.hugegraph.testutil.Assert;
import com.baidu.hugegraph.testutil.FakeObjects.FakeEdge;
@@ -1747,32 +1745,6 @@ public void testScanEdge() {
Assert.assertEquals(18, edges.size());
}

@Test
public void testScanEdgeInPaging() {
HugeGraph graph = graph();
Assume.assumeTrue("Not support scan",
storeFeatures().supportsScanToken() ||
storeFeatures().supportsScanKeyRange());
init18Edges();

List<Edge> edges = new LinkedList<>();

ConditionQuery query = new ConditionQuery(HugeType.EDGE);
query.scan(String.valueOf(Long.MIN_VALUE),
String.valueOf(Long.MAX_VALUE));
query.limit(1);
String page = "";
while (page != null) {
query.page(page);
Iterator<Edge> iterator = graph.edges(query);
while (iterator.hasNext()) {
edges.add(iterator.next());
}
page = (String) ((Metadatable) iterator).metadata("page");
}
Assert.assertEquals(18, edges.size());
}

@Test
public void testRemoveEdge() {
HugeGraph graph = graph();
@@ -52,7 +52,6 @@
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.backend.tx.GraphTransaction;
import com.baidu.hugegraph.exception.NoIndexException;
import com.baidu.hugegraph.iterator.Metadatable;
import com.baidu.hugegraph.schema.PropertyKey;
import com.baidu.hugegraph.schema.SchemaManager;
import com.baidu.hugegraph.schema.VertexLabel;
@@ -3009,32 +3008,6 @@ public void testScanVertex() {
Assert.assertEquals(10, vertexes.size());
}

@Test
public void testScanVertexInPaging() {
HugeGraph graph = graph();
Assume.assumeTrue("Not support scan",
storeFeatures().supportsScanToken() ||
storeFeatures().supportsScanKeyRange());
init10Vertices();

List<Vertex> vertexes = new LinkedList<>();

ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
query.scan(String.valueOf(Long.MIN_VALUE),
String.valueOf(Long.MAX_VALUE));
query.limit(1);
String page = "";
while (page != null) {
query.page(page);
Iterator<Vertex> iterator = graph.vertices(query);
while (iterator.hasNext()) {
vertexes.add(iterator.next());
}
page = (String) ((Metadatable) iterator).metadata("page");
}
Assert.assertEquals(10, vertexes.size());
}

@Test
public void testScanVertexWithSplitSizeLt1MB() {
HugeGraph graph = graph();
