Skip to content

Commit

Permalink
improve index query and ids query
Browse files Browse the repository at this point in the history
changes:
 * do not return duplicate records when querying by ids
 * tx cache overrides the queryVerticesFromBackend() method

Change-Id: Ib0116f2faf0d0da9ec40ea335c87950e1c6e7276
  • Loading branch information
javeme committed Oct 11, 2018
1 parent eb6aee0 commit daa7eee
Show file tree
Hide file tree
Showing 6 changed files with 263 additions and 240 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,9 @@ public interface Cache {

/**
 * Insert or overwrite the value mapped to {@code id}.
 */
public void update(Id id, Object value);

/**
 * Write the value only when {@code id} is not cached yet.
 */
public void updateIfAbsent(Id id, Object value);

/**
 * Write the value only when {@code id} is already cached
 * (no-op for a missing key).
 */
public void updateIfPresent(Id id, Object value);

/**
 * Remove the cache entry mapped to {@code id}.
 */
public void invalidate(Id id);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,24 +23,24 @@
import java.util.Iterator;
import java.util.List;

import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;

import com.baidu.hugegraph.HugeGraph;
import com.baidu.hugegraph.backend.BackendException;
import com.baidu.hugegraph.backend.cache.CachedBackendStore.QueryId;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.ConditionQuery;
import com.baidu.hugegraph.backend.query.ConditionQueryFlatten;
import com.baidu.hugegraph.backend.query.IdQuery;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.store.BackendStore;
import com.baidu.hugegraph.backend.tx.GraphTransaction;
import com.baidu.hugegraph.config.CoreOptions;
import com.baidu.hugegraph.config.HugeConfig;
import com.baidu.hugegraph.schema.EdgeLabel;
import com.baidu.hugegraph.iterator.ExtendableIterator;
import com.baidu.hugegraph.schema.IndexLabel;
import com.baidu.hugegraph.structure.HugeEdge;
import com.baidu.hugegraph.structure.HugeEdgeProperty;
import com.baidu.hugegraph.structure.HugeVertex;
import com.baidu.hugegraph.structure.HugeVertexProperty;
import com.baidu.hugegraph.type.HugeType;
import com.google.common.collect.ImmutableList;

public class CachedGraphTransaction extends GraphTransaction {
Expand Down Expand Up @@ -72,46 +72,67 @@ private Cache cache(String prefix, int capacity, long expire) {
}

@Override
public Iterator<Vertex> queryVertices(Object... vertexIds) {
List<Vertex> vertices = new ArrayList<>(vertexIds.length);
for (Object vertexId : vertexIds) {
if (vertexId == null) {
continue;
}
Id vid = HugeVertex.getIdValue(vertexId);
Object v = this.verticesCache.getOrFetch(vid, id -> {
Iterator<Vertex> iterator = super.queryVertices(id);
return iterator.hasNext() ? iterator.next() : null;
});
if (v != null) {
vertices.add((Vertex) v);
public Iterator<HugeVertex> queryVerticesFromBackend(Query query) {
    // Pure id query: serve each id through the per-vertex cache
    if (!query.ids().isEmpty() && query.conditions().isEmpty()) {
        return this.queryVerticesByIds((IdQuery) query);
    } else if (query instanceof ConditionQuery) {
        // Flatten OR-ed conditions into independent sub-queries; sub-queries
        // that optimize down to plain id lookups also go through the cache
        ExtendableIterator<HugeVertex> results = new ExtendableIterator<>();
        for (ConditionQuery cq : ConditionQueryFlatten.flatten(
                                 (ConditionQuery) query)) {
            Query q = this.optimizeQuery(cq);
            if (!q.ids().isEmpty()) {
                results.extend(this.queryVerticesByIds((IdQuery) q));
            } else if (!q.empty()) {
                results.extend(super.queryVerticesFromBackend(q));
            }
        }
        return results;
    } else {
        return super.queryVerticesFromBackend(query);
    }
}

@Override
public Iterator<Vertex> queryVertices(Query query) {
if (!query.ids().isEmpty() && query.conditions().isEmpty()) {
return this.queryVertices(query.ids().toArray());
} else {
return super.queryVertices(query);
/*
 * Resolve an id-only query against the vertex cache first, then fetch
 * only the cache misses from the backend (and back-fill the cache with
 * whatever the backend returns).
 */
private Iterator<HugeVertex> queryVerticesByIds(IdQuery query) {
    List<HugeVertex> results = new ArrayList<>(query.ids().size());
    IdQuery missedQuery = new IdQuery(HugeType.VERTEX, query);
    for (Id id : query.ids()) {
        Object cached = this.verticesCache.get(id);
        if (cached == null) {
            // Cache miss: collect the id for one batched backend query
            missedQuery.query(id);
        } else {
            results.add((HugeVertex) cached);
        }
    }
    if (results.isEmpty()) {
        // Nothing was cached: fall back to the original query as a whole
        missedQuery = query;
    }
    if (!missedQuery.empty()) {
        Iterator<HugeVertex> fetched =
                             super.queryVerticesFromBackend(missedQuery);
        while (fetched.hasNext()) {
            HugeVertex vertex = fetched.next();
            results.add(vertex);
            this.verticesCache.update(vertex.id(), vertex);
        }
    }
    return results.iterator();
}

@Override
public Iterator<Edge> queryEdges(Query query) {
public Iterator<HugeEdge> queryEdgesFromBackend(Query query) {
if (query.empty()) {
// Query all edges, don't cache it
return super.queryEdges(query);
return super.queryEdgesFromBackend(query);
}

Id id = new QueryId(query);
@SuppressWarnings("unchecked")
List<Edge> edges = (List<Edge>) this.edgesCache.get(id);
List<HugeEdge> edges = (List<HugeEdge>) this.edgesCache.get(id);
if (edges == null) {
// Iterator can't be cached, caching list instead
edges = ImmutableList.copyOf(super.queryEdges(query));
edges = ImmutableList.copyOf(super.queryEdgesFromBackend(query));
if (edges.size() <= MAX_CACHE_EDGES_PER_QUERY) {
this.edgesCache.update(id, edges);
}
Expand Down Expand Up @@ -140,58 +161,43 @@ public void removeVertex(HugeVertex vertex) {

@Override
public <V> void addVertexProperty(HugeVertexProperty<V> prop) {
// Update vertex cache
this.verticesCache.invalidate(prop.element().id());

super.addVertexProperty(prop);
}

@Override
public <V> void removeVertexProperty(HugeVertexProperty<V> prop) {
// Update vertex cache
this.verticesCache.invalidate(prop.element().id());
HugeVertex vertex = prop.element();
this.verticesCache.updateIfPresent(vertex.id(), vertex);

super.removeVertexProperty(prop);
}

@Override
public HugeEdge addEdge(HugeEdge edge) {
// TODO: Use a more precise strategy to update the edge cache
this.edgesCache.clear();

return super.addEdge(edge);
}

@Override
public void removeEdge(HugeEdge edge) {
// TODO: Use a more precise strategy to update the edge cache
this.edgesCache.clear();
public <V> void removeVertexProperty(HugeVertexProperty<V> prop) {
super.removeVertexProperty(prop);

super.removeEdge(edge);
// Update vertex cache
HugeVertex vertex = prop.element();
this.verticesCache.updateIfPresent(vertex.id(), vertex);
}

@Override
public void removeEdges(EdgeLabel edgeLabel) {
super.removeEdges(edgeLabel);

// TODO: Use a more precise strategy to update the edge cache
this.edgesCache.clear();
// Drop cached edge-query results before committing, since any edge
// mutation in this tx can make them stale (the edge cache stores whole
// query result lists keyed by QueryId).
public void commit() throws BackendException {
    if (!this.edgesInTx().isEmpty()) {
        // TODO: Use a more precise strategy to update the edge cache
        this.edgesCache.clear();
    }
    super.commit();
}

@Override
public <V> void addEdgeProperty(HugeEdgeProperty<V> prop) {
// TODO: Use a more precise strategy to update the edge cache
this.edgesCache.clear();

super.addEdgeProperty(prop);
}
public void rollback() throws BackendException {
// Update vertex cache
for (Id id : this.verticesInTx()) {
this.verticesCache.invalidate(id);
}

@Override
public <V> void removeEdgeProperty(HugeEdgeProperty<V> prop) {
// TODO: Use a more precise strategy to update the edge cache
this.edgesCache.clear();

super.removeEdgeProperty(prop);
super.rollback();
}

@Override
Expand All @@ -201,17 +207,4 @@ public void removeIndex(IndexLabel indexLabel) {

super.removeIndex(indexLabel);
}

@Override
public void rollback() throws BackendException {
// Update vertex cache
for (Id id : this.verticesInTx()) {
this.verticesCache.invalidate(id);
}

// TODO: Use a more precise strategy to update the edge cache
this.edgesCache.clear();

super.rollback();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,16 @@ public void updateIfAbsent(Id id, Object value) {
this.write(id, value);
}

@Watched(prefix = "ramcache")
@Override
public void updateIfPresent(Id id, Object value) {
    // Reject null keys/values outright
    if (id == null || value == null) {
        return;
    }
    // Only overwrite entries that already exist in a usable cache
    if (this.capacity <= 0 || !this.map.containsKey(id)) {
        return;
    }
    this.write(id, value);
}

@Watched(prefix = "ramcache")
@Override
public void invalidate(Id id) {
Expand All @@ -259,9 +269,8 @@ public void invalidate(Id id) {
@Override
public void traverse(Consumer<Object> consumer) {
    E.checkNotNull(consumer, "consumer");
    // Visit every cached value exactly once.
    // NOTE: forEach is 20% faster than for-in with ConcurrentHashMap
    this.map.values().forEach(node -> consumer.accept(node.value()));
}

@Watched(prefix = "ramcache")
Expand Down
Loading

0 comments on commit daa7eee

Please sign in to comment.