diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/HugeGraphParams.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/HugeGraphParams.java index 458643c0d1..8d51af883a 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/HugeGraphParams.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/HugeGraphParams.java @@ -23,6 +23,7 @@ import com.baidu.hugegraph.backend.serializer.AbstractSerializer; import com.baidu.hugegraph.backend.store.BackendFeatures; import com.baidu.hugegraph.backend.store.BackendStore; +import com.baidu.hugegraph.backend.store.ram.RamTable; import com.baidu.hugegraph.backend.tx.GraphTransaction; import com.baidu.hugegraph.backend.tx.SchemaTransaction; import com.baidu.hugegraph.config.HugeConfig; @@ -68,4 +69,5 @@ public interface HugeGraphParams { public Analyzer analyzer(); public RateLimiter writeRateLimiter(); public RateLimiter readRateLimiter(); + public RamTable ramtable(); } diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/StandardHugeGraph.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/StandardHugeGraph.java index 94258e7ea3..a25bcd93d8 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/StandardHugeGraph.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/StandardHugeGraph.java @@ -54,6 +54,7 @@ import com.baidu.hugegraph.backend.store.BackendStore; import com.baidu.hugegraph.backend.store.BackendStoreProvider; import com.baidu.hugegraph.backend.store.BackendStoreSystemInfo; +import com.baidu.hugegraph.backend.store.ram.RamTable; import com.baidu.hugegraph.backend.tx.GraphTransaction; import com.baidu.hugegraph.backend.tx.SchemaTransaction; import com.baidu.hugegraph.config.CoreOptions; @@ -126,26 +127,37 @@ public class StandardHugeGraph implements HugeGraph { private final BackendStoreProvider storeProvider; private final TinkerpopTransaction tx; - public StandardHugeGraph(HugeConfig configuration) { + private final RamTable ramtable; + + public 
StandardHugeGraph(HugeConfig config) { this.params = new StandardHugeGraphParams(); - this.configuration = configuration; + this.configuration = config; this.schemaEventHub = new EventHub("schema"); this.graphEventHub = new EventHub("graph"); this.indexEventHub = new EventHub("index"); - final int writeLimit = configuration.get(CoreOptions.RATE_LIMIT_WRITE); + final int writeLimit = config.get(CoreOptions.RATE_LIMIT_WRITE); this.writeRateLimiter = writeLimit > 0 ? RateLimiter.create(writeLimit) : null; - final int readLimit = configuration.get(CoreOptions.RATE_LIMIT_READ); + final int readLimit = config.get(CoreOptions.RATE_LIMIT_READ); this.readRateLimiter = readLimit > 0 ? RateLimiter.create(readLimit) : null; + boolean ramtableEnable = config.get(CoreOptions.QUERY_RAMTABLE_ENABLE); + if (ramtableEnable) { + long vc = config.get(CoreOptions.QUERY_RAMTABLE_VERTICES_CAPACITY); + int ec = config.get(CoreOptions.QUERY_RAMTABLE_EDGES_CAPACITY); + this.ramtable = new RamTable(this, vc, ec); + } else { + this.ramtable = null; + } + this.taskManager = TaskManager.instance(); this.features = new HugeFeatures(this, true); - this.name = configuration.get(CoreOptions.STORE); + this.name = config.get(CoreOptions.STORE); this.started = false; this.closed = false; this.mode = GraphMode.NONE; @@ -403,6 +415,19 @@ private Analyzer analyzer() { return AnalyzerFactory.analyzer(name, mode); } + protected void reloadRamtable() { + this.reloadRamtable(false); + } + + protected void reloadRamtable(boolean loadFromFile) { + // Expect triggered manually, like gremlin job + if (this.ramtable != null) { + this.ramtable.reload(loadFromFile, this.name); + } else { + LOG.warn("The ramtable feature is not enabled for graph {}", this); + } + } + @Override public C compute(Class clazz) throws IllegalArgumentException { @@ -988,6 +1013,11 @@ public RateLimiter writeRateLimiter() { public RateLimiter readRateLimiter() { return StandardHugeGraph.this.readRateLimiter; } + + @Override + public 
RamTable ramtable() { + return StandardHugeGraph.this.ramtable; + } } private class TinkerpopTransaction extends AbstractThreadLocalTransaction { diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/cache/CachedGraphTransaction.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/cache/CachedGraphTransaction.java index 280c71181b..056f2c3fce 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/cache/CachedGraphTransaction.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/cache/CachedGraphTransaction.java @@ -34,6 +34,7 @@ import com.baidu.hugegraph.backend.query.QueryResults; import com.baidu.hugegraph.backend.store.BackendMutation; import com.baidu.hugegraph.backend.store.BackendStore; +import com.baidu.hugegraph.backend.store.ram.RamTable; import com.baidu.hugegraph.backend.tx.GraphTransaction; import com.baidu.hugegraph.config.CoreOptions; import com.baidu.hugegraph.config.HugeConfig; @@ -226,6 +227,11 @@ private Iterator queryVerticesByIds(IdQuery query) { @Override protected final Iterator queryEdgesFromBackend(Query query) { + RamTable ramtable = this.params().ramtable(); + if (ramtable != null && ramtable.matched(query)) { + return ramtable.query(query); + } + if (query.empty() || query.paging() || query.bigCapacity()) { // Query all edges or query edges in paging, don't cache it return super.queryEdgesFromBackend(query); diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntIntMap.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntIntMap.java new file mode 100644 index 0000000000..755cbe9ebf --- /dev/null +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntIntMap.java @@ -0,0 +1,78 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.backend.store.ram; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.Arrays; + +import com.baidu.hugegraph.HugeException; + +public final class IntIntMap implements RamMap { + + // TODO: use com.carrotsearch.hppc.IntIntHashMap instead + private final int[] array; + + public IntIntMap(int capacity) { + this.array = new int[capacity]; + } + + public void put(long key, int value) { + assert 0 <= key && key < Integer.MAX_VALUE; + this.array[(int) key] = value; + } + + public int get(long key) { + assert 0 <= key && key < Integer.MAX_VALUE; + return this.array[(int) key]; + } + + @Override + public void clear() { + Arrays.fill(this.array, 0); + } + + @Override + public long size() { + return this.array.length; + } + + @Override + public void writeTo(DataOutputStream buffer) throws IOException { + buffer.writeInt(this.array.length); + for (int value : this.array) { + buffer.writeInt(value); + } + } + + @Override + public void readFrom(DataInputStream buffer) throws IOException { + int size = buffer.readInt(); + if (size > this.array.length) { + throw new HugeException("Invalid size %s, expect < %s", + size, this.array.length); + } + for (int i = 0; i < size; i++) { + int value = 
buffer.readInt(); + this.array[i] = value; + } + } +} diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntLongMap.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntLongMap.java new file mode 100644 index 0000000000..7bbb7b98c9 --- /dev/null +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntLongMap.java @@ -0,0 +1,94 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.backend.store.ram; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.Arrays; + +import com.baidu.hugegraph.HugeException; + +public final class IntLongMap implements RamMap { + + // TODO: use com.carrotsearch.hppc.IntLongHashMap instead + private final long[] array; + private int size; + + public IntLongMap(int capacity) { + this.array = new long[capacity]; + this.size = 0; + } + + public void put(int key, long value) { + if (key >= this.size || key < 0) { + throw new HugeException("Invalid key %s", key); + } + this.array[key] = value; + } + + public int add(long value) { + if (this.size == Integer.MAX_VALUE) { + throw new HugeException("Too many edges %s", this.size); + } + int index = this.size; + this.array[index] = value; + this.size++; + return index; + } + + public long get(int key) { + if (key >= this.size || key < 0) { + throw new HugeException("Invalid key %s", key); + } + return this.array[key]; + } + + @Override + public void clear() { + Arrays.fill(this.array, 0L); + } + + @Override + public long size() { + return this.size; + } + + @Override + public void writeTo(DataOutputStream buffer) throws IOException { + buffer.writeInt(this.array.length); + for (long value : this.array) { + buffer.writeLong(value); + } + } + + @Override + public void readFrom(DataInputStream buffer) throws IOException { + int size = buffer.readInt(); + if (size > this.array.length) { + throw new HugeException("Invalid size %s, expect < %s", + size, this.array.length); + } + for (int i = 0; i < size; i++) { + long value = buffer.readLong(); + this.array[i] = value; + } + } +} diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntObjectMap.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntObjectMap.java new file mode 100644 index 0000000000..22d4ebc7dc --- /dev/null +++ 
b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/IntObjectMap.java @@ -0,0 +1,65 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.backend.store.ram; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.Arrays; + +import com.baidu.hugegraph.exception.NotSupportException; + +public final class IntObjectMap implements RamMap { + + private final Object[] array; + + public IntObjectMap(int size) { + this.array = new Object[size]; + } + + @SuppressWarnings("unchecked") + public V get(int key) { + return (V) this.array[key]; + } + + public void set(int key, V value) { + this.array[key] = value; + } + + @Override + public void clear() { + Arrays.fill(this.array, null); + } + + @Override + public long size() { + return this.array.length; + } + + @Override + public void writeTo(DataOutputStream buffer) throws IOException { + throw new NotSupportException("IntObjectMap.writeTo"); + } + + @Override + public void readFrom(DataInputStream buffer) throws IOException { + throw new NotSupportException("IntObjectMap.readFrom"); + } +} diff --git 
a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/RamMap.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/RamMap.java new file mode 100644 index 0000000000..62c72a4c0c --- /dev/null +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/store/ram/RamMap.java @@ -0,0 +1,35 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
/**
 * Minimal contract for the fixed-capacity, array-backed maps that back
 * the in-memory graph topology (RamTable).
 */
public interface RamMap {

    /** Remove all entries; the underlying capacity is kept unchanged. */
    public void clear();

    // NOTE(review): implementations disagree on the meaning of size() —
    // IntIntMap returns the fixed capacity while IntLongMap returns the
    // count of entries added; document/align if callers need consistency.
    /** Number of slots or entries, depending on the implementation. */
    public long size();

    /** Serialize the content to the given stream. */
    public void writeTo(DataOutputStream buffer) throws IOException;

    /** Deserialize content previously produced by writeTo(). */
    public void readFrom(DataInputStream buffer) throws IOException;
}
/**
 * An in-memory adjacency table holding the whole graph topology
 * (owner vertex -> adjacent edges) for fast edge queries.
 *
 * Layout: vertex ids index into two IntIntMap halves (low/high) whose
 * values are start positions into a single IntLongMap of encoded edges;
 * each encoded edge packs (target id, direction, label) into one long.
 * Edge properties and sort-keys are NOT stored (see encode()).
 */
public final class RamTable {

    public static final String USER_DIR = System.getProperty("user.dir");
    public static final String EXPORT_PATH = USER_DIR + "/export";

    private static final Logger LOG = Log.logger(RamTable.class);

    // max vertices count, including ids of non-existing vertices,
    // default 2.4 billion
    private static final long VERTICES_CAPACITY = 2400000000L;
    // max edges count, include OUT and IN edges, default 2.1 billion
    private static final int EDGES_CAPACITY = 2100000000;

    // Sentinel adjacency position meaning "vertex has no edges";
    // position 0 of the edges map is reserved as the null edge
    private static final int NULL = 0;

    // The compound condition a query uses to express direction=BOTH
    private static final Condition BOTH_COND = Condition.or(
            Condition.eq(HugeKeys.DIRECTION, Directions.OUT),
            Condition.eq(HugeKeys.DIRECTION, Directions.IN));

    private final HugeGraph graph;
    private final long verticesCapacity;
    private final int verticesCapacityHalf;
    private final int edgesCapacity;

    // Vertex id space is split in two halves so that a long id larger
    // than Integer.MAX_VALUE can still index an int-addressed array
    private IntIntMap verticesLow;
    private IntIntMap verticesHigh;
    private IntLongMap edges;

    // volatile: reload() runs in one thread while query()/matched() may
    // be called concurrently; both bail out while loading is true
    private volatile boolean loading = false;

    public RamTable(HugeGraph graph) {
        this(graph, VERTICES_CAPACITY, EDGES_CAPACITY);
    }

    public RamTable(HugeGraph graph, long maxVertices, int maxEdges) {
        this.graph = graph;
        // +2: one slot past the largest id is written by addEdge()
        // (owner + 1), so reserve headroom beyond maxVertices
        this.verticesCapacity = maxVertices + 2L;
        this.verticesCapacityHalf = (int) (this.verticesCapacity / 2L);
        // +1: slot 0 is reserved for the null edge
        this.edgesCapacity = maxEdges + 1;
        this.reset();
    }

    /** Drop and reallocate all backing arrays. */
    private void reset() {
        this.verticesLow = null;
        this.verticesHigh = null;
        this.edges = null;
        this.verticesLow = new IntIntMap(this.verticesCapacityHalf);
        this.verticesHigh = new IntIntMap(this.verticesCapacityHalf);
        this.edges = new IntLongMap(this.edgesCapacity);
        // Set the first element as null edge
        this.edges.add(0L);
    }

    /**
     * Rebuild the table, either from an exported file or from the
     * backend store (optionally exporting to file afterwards).
     * Expected to be triggered manually (e.g. via a gremlin job).
     *
     * @param loadFromFile true to read a previous export, false to scan
     *                     the backend
     * @param file         export/import file name (may be null when
     *                     loading from the backend)
     * @throws HugeException if a load is already in progress or loading
     *                       fails (the table is reset on failure)
     */
    public void reload(boolean loadFromFile, String file) {
        // NOTE(review): check-then-set on the volatile flag is not
        // atomic; two concurrent reload() calls could both pass this
        // check — acceptable if reload is operator-triggered, but
        // confirm or use compareAndSet semantics.
        if (this.loading) {
            throw new HugeException("There is one loading task, " +
                                    "please wait for it to complete");
        }

        this.loading = true;
        try {
            this.reset();
            if (loadFromFile) {
                this.loadFromFile(file);
            } else {
                this.loadFromDB();
                if (file != null) {
                    LOG.info("Export graph to file '{}'", file);
                    if (!this.exportToFile(file)) {
                        LOG.warn("Can't export graph to file '{}'", file);
                    }
                }
            }
            LOG.info("Loaded {} edges", this.edgesSize());
        } catch (Throwable e) {
            // Leave the table empty rather than half-loaded
            this.reset();
            throw new HugeException("Failed to load ramtable", e);
        } finally {
            this.loading = false;
        }
    }

    /**
     * Restore the three maps from a file under EXPORT_PATH that was
     * produced by exportToFile() (same order: low, high, edges).
     */
    private void loadFromFile(String fileName) throws Exception {
        File file = Paths.get(EXPORT_PATH, fileName).toFile();
        if (!file.exists() || !file.isFile() || !file.canRead()) {
            // NOTE(review): message has a grammar slip ("does not
            // existed or readable") — runtime string kept as-is here
            throw new IllegalArgumentException(String.format(
                      "File '%s' does not existed or readable", fileName));
        }
        try (FileInputStream fis = new FileInputStream(file);
             BufferedInputStream bis = new BufferedInputStream(fis);
             DataInputStream input = new DataInputStream(bis)) {
            // read vertices
            this.verticesLow.readFrom(input);
            this.verticesHigh.readFrom(input);
            // read edges
            this.edges.readFrom(input);
        }
    }

    /**
     * Write the three maps to a file under EXPORT_PATH.
     * @return false if the file could not be created
     */
    private boolean exportToFile(String fileName) throws Exception {
        File file = Paths.get(EXPORT_PATH, fileName).toFile();
        if (!file.exists()) {
            FileUtils.forceMkdir(file.getParentFile());
            if (!file.createNewFile()) {
                return false;
            }
        }
        try (FileOutputStream fos = new FileOutputStream(file);
             BufferedOutputStream bos = new BufferedOutputStream(fos);
             DataOutputStream output = new DataOutputStream(bos)) {
            // write vertices
            this.verticesLow.writeTo(output);
            this.verticesHigh.writeTo(output);
            // write edges
            this.edges.writeTo(output);
        }
        return true;
    }

    /**
     * Scan all vertices (and their adjacent edges) from the backend.
     * Requires the backend to return vertices in ascending id order,
     * which is verified via lastId below.
     */
    private void loadFromDB() throws Exception {
        Query query = new Query(HugeType.VERTEX);
        query.capacity(this.verticesCapacityHalf * 2L);
        query.limit(Query.NO_LIMIT);
        Iterator<Vertex> vertices = this.graph.vertices(query);

        // switch concurrent loading here
        boolean concurrent = true;
        if (concurrent) {
            try (LoadTraverser traverser = new LoadTraverser()) {
                traverser.load(vertices);
            }
            return;
        }

        // Sequential fallback path (dead while concurrent == true)
        Iterator<Edge> adjEdges;
        Id lastId = IdGenerator.ZERO;
        while (vertices.hasNext()) {
            Id vertex = (Id) vertices.next().id();
            if (vertex.compareTo(lastId) < 0) {
                // Out-of-order ids would corrupt the position layout
                throw new HugeException("The ramtable feature is not " +
                                        "supported by %s backend",
                                        this.graph.backend());
            }
            if (!vertex.number()) {
                throw new HugeException("Only number id is supported by " +
                                        "ramtable, but got %s id '%s'",
                                        vertex.type().name().toLowerCase(),
                                        vertex);
            }
            lastId = vertex;

            adjEdges = this.graph.adjacentEdges(vertex);
            // First edge of a vertex records its start position
            if (adjEdges.hasNext()) {
                HugeEdge edge = (HugeEdge) adjEdges.next();
                this.addEdge(true, edge);
            }
            while (adjEdges.hasNext()) {
                HugeEdge edge = (HugeEdge) adjEdges.next();
                this.addEdge(false, edge);
            }
        }
    }

    /**
     * Add one edge of a vertex; newVertex must be true exactly for the
     * first edge of each owner vertex.
     */
    public void addEdge(boolean newVertex, HugeEdge edge) {
        this.addEdge(newVertex,
                     edge.id().ownerVertexId().asLong(),
                     edge.id().otherVertexId().asLong(),
                     edge.direction(),
                     (int) edge.schemaLabel().id().asLong());
    }

    public void addEdge(boolean newVertex, long owner, long target,
                        Directions direction, int label) {
        long value = encode(target, direction, label);
        this.addEdge(newVertex, owner, value);
    }

    public void addEdge(boolean newVertex, long owner, long value) {
        int position = this.edges.add(value);
        if (newVertex) {
            assert this.vertexAdjPosition(owner) <= NULL : owner;
            this.vertexAdjPosition(owner, position);
        }
        // maybe there are no edges of the next vertex, set -position
        // first; if the next vertex has edges it will overwrite this
        // with its own (positive) start position
        this.vertexAdjPosition(owner + 1, -position);
    }

    public long edgesSize() {
        // -1 means the first is NULL edge
        return this.edges.size() - 1L;
    }

    /**
     * Whether this table can answer the given query: an edge
     * ConditionQuery by OWNER_VERTEX, optionally plus DIRECTION and/or
     * LABEL, and nothing else. Always false while (re)loading or empty.
     */
    public boolean matched(Query query) {
        if (this.edgesSize() == 0L || this.loading) {
            return false;
        }
        if (!query.resultType().isEdge() ||
            !(query instanceof ConditionQuery)) {
            return false;
        }

        ConditionQuery cq = (ConditionQuery) query;

        int conditionsSize = cq.conditions().size();
        Id owner = cq.condition(HugeKeys.OWNER_VERTEX);
        Directions direction = cq.condition(HugeKeys.DIRECTION);
        Id label = cq.condition(HugeKeys.LABEL);

        // A BOTH query arrives as an OR(OUT, IN) compound condition
        if (direction == null && conditionsSize > 1) {
            for (Condition cond : cq.conditions()) {
                if (cond.equals(BOTH_COND)) {
                    direction = Directions.BOTH;
                }
            }
        }

        // Matched only if every condition was recognized above
        int matchedConds = 0;
        if (owner != null) {
            matchedConds++;
        } else {
            return false;
        }
        if (direction != null) {
            matchedConds++;
        }
        if (label != null) {
            matchedConds++;
        }
        return matchedConds == cq.conditions().size();
    }

    /**
     * Answer a query previously accepted by matched().
     * Missing direction defaults to BOTH, missing label matches all.
     */
    public Iterator<HugeEdge> query(Query query) {
        assert this.matched(query);
        assert this.edgesSize() > 0;

        ConditionQuery cq = (ConditionQuery) query;
        Id owner = cq.condition(HugeKeys.OWNER_VERTEX);
        assert owner != null;
        Directions dir = cq.condition(HugeKeys.DIRECTION);
        if (dir == null) {
            dir = Directions.BOTH;
        }
        Id label = cq.condition(HugeKeys.LABEL);
        if (label == null) {
            label = IdGenerator.ZERO;
        }
        return this.query(owner.asLong(), dir, (int) label.asLong());
    }

    public Iterator<HugeEdge> query(long owner, Directions dir, int label) {
        if (this.loading) {
            // don't query when loading
            return Collections.emptyIterator();
        }

        int start = this.vertexAdjPosition(owner);
        if (start <= NULL) {
            // Vertex unknown or has no edges
            return Collections.emptyIterator();
        }
        int end = this.vertexAdjPosition(owner + 1);
        assert start != NULL;
        if (end < NULL) {
            // The next vertex has no edges: -end is the last edge
            // position of this owner, so the exclusive end is 1 - end
            end = 1 - end;
        }
        return new EdgeRangeIterator(start, end, dir, label, owner);
    }

    /** Record the adjacency start position of a vertex id. */
    private void vertexAdjPosition(long vertex, int position) {
        if (vertex < this.verticesCapacityHalf) {
            this.verticesLow.put(vertex, position);
        } else if (vertex < this.verticesCapacity) {
            vertex -= this.verticesCapacityHalf;
            assert vertex < Integer.MAX_VALUE;
            this.verticesHigh.put(vertex, position);
        } else {
            // NOTE(review): typo "capaticy" in this runtime message;
            // kept byte-identical here, fix in a follow-up change
            throw new HugeException("Out of vertices capaticy %s",
                                    this.verticesCapacity);
        }
    }

    /** Look up the adjacency start position of a vertex id. */
    private int vertexAdjPosition(long vertex) {
        if (vertex < this.verticesCapacityHalf) {
            return this.verticesLow.get(vertex);
        } else if (vertex < this.verticesCapacity) {
            vertex -= this.verticesCapacityHalf;
            assert vertex < Integer.MAX_VALUE;
            return this.verticesHigh.get(vertex);
        } else {
            throw new HugeException("Out of vertices capaticy %s: %s",
                                    this.verticesCapacity, vertex);
        }
    }

    /**
     * Pack one edge into a long: high 32 bits = target vertex id
     * (unsigned), bit 31 = direction (0 OUT / 1 IN), low bits = label.
     */
    private static long encode(long target, Directions direction, int label) {
        // TODO: support property
        assert (label & 0x0fffffff) == label;
        assert target < 2L * Integer.MAX_VALUE : target;
        long value = target & 0xffffffff;
        long dir = direction == Directions.OUT ?
                   0x00000000L : 0x80000000L;
        value = (value << 32) | (dir | label);
        return value;
    }

    /**
     * Lazily decodes edges in [start, end) of the edges map, filtering
     * by direction and label, and materializing HugeEdge instances with
     * unloaded properties.
     */
    private class EdgeRangeIterator implements Iterator<HugeEdge> {

        private final int end;
        private final Directions dir;
        private final int label;
        private final Id owner;
        private int current;
        private HugeEdge currentEdge;

        public EdgeRangeIterator(int start, int end,
                                 Directions dir, int label, long owner) {
            assert 0 < start && start < end;
            this.end = end;
            this.dir = dir;
            this.label = label;
            this.owner = IdGenerator.of(owner);

            this.current = start;
            this.currentEdge = null;
        }

        @Override
        public boolean hasNext() {
            if (this.currentEdge != null) {
                return true;
            }
            // Advance until a position matches the filters or the
            // range is exhausted
            while (this.current < this.end) {
                this.currentEdge = this.fetch();
                if (this.currentEdge != null) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public HugeEdge next() {
            if (!this.hasNext()) {
                throw new NoSuchElementException();
            }
            assert this.currentEdge != null;
            HugeEdge edge = this.currentEdge;
            this.currentEdge = null;
            return edge;
        }

        /**
         * Decode the edge at the current position (advancing it);
         * return null if it does not match the direction/label filter.
         */
        private HugeEdge fetch() {
            if (this.current >= this.end) {
                return null;
            }
            long value = RamTable.this.edges.get(this.current++);
            // See encode(): high 32 bits target, bit 31 dir, rest label
            long target = value >>> 32;
            assert target >= 0L : target;
            Directions actualDir = (value & 0x80000000L) == 0L ?
                                   Directions.OUT : Directions.IN;
            int label = (int) value & 0x7fffffff;
            assert label >= 0;

            if (this.dir != actualDir && this.dir != Directions.BOTH) {
                return null;
            }
            // label 0 means "any label"
            if (this.label != label && this.label != 0) {
                return null;
            }

            Id labelId = IdGenerator.of(label);
            Id targetId = IdGenerator.of(target);
            // Sort values are not stored in ramtable, hence ""
            EdgeId id = new EdgeId(this.owner, actualDir, labelId,
                                   "", targetId);
            HugeGraph graph = RamTable.this.graph;
            EdgeLabel edgeLabel = graph.edgeLabel(labelId);
            VertexLabel srcLabel = graph.vertexLabelOrNone(
                                   edgeLabel.sourceLabel());
            VertexLabel tgtLabel = graph.vertexLabelOrNone(
                                   edgeLabel.targetLabel());

            HugeEdge edge = new HugeEdge(graph, id, edgeLabel);
            if (actualDir == Directions.OUT) {
                HugeVertex owner = new HugeVertex(graph, this.owner, srcLabel);
                HugeVertex other = new HugeVertex(graph, targetId, tgtLabel);
                owner.propNotLoaded();
                other.propNotLoaded();
                edge.vertices(true, owner, other);
            } else {
                HugeVertex owner = new HugeVertex(graph, this.owner, tgtLabel);
                HugeVertex other = new HugeVertex(graph, targetId, srcLabel);
                owner.propNotLoaded();
                other.propNotLoaded();
                edge.vertices(false, owner, other);
            }
            edge.propNotLoaded();
            return edge;
        }
    }

    /**
     * Concurrent loader: the main thread streams vertex ids in order
     * while a Consumers pool fetches each vertex's adjacent edges into
     * a concurrent map; edges are then appended in vertex-id order in
     * batches to preserve the position layout.
     */
    private class LoadTraverser implements AutoCloseable {

        private final HugeGraph graph;
        private final ExecutorService executor;
        private final List<Id> vertices;
        private final Map<Id, List<Edge>> edges;

        private static final int ADD_BATCH = Consumers.QUEUE_WORKER_SIZE;

        public LoadTraverser() {
            this.graph = RamTable.this.graph;
            this.executor = Consumers.newThreadPool("ramtable-load",
                                                    Consumers.THREADS);
            this.vertices = new ArrayList<>(ADD_BATCH);
            this.edges = new ConcurrentHashMap<>();
        }

        @Override
        public void close() throws Exception {
            if (this.executor != null) {
                this.executor.shutdown();
            }
        }

        /**
         * Drive the load; returns the number of vertices visited.
         * The vertex iterator is always closed, even on failure.
         */
        protected long load(Iterator<Vertex> vertices) {
            Consumers<Id> consumers = new Consumers<>(this.executor, vertex -> {
                Iterator<Edge> adjEdges = this.graph.adjacentEdges(vertex);
                this.edges.put(vertex, IteratorUtils.list(adjEdges));
            }, null);

            consumers.start("ramtable-loading");

            long total = 0L;
            try {
                while (vertices.hasNext()) {
                    if (++total % 10000000 == 0) {
                        LOG.info("Loaded {} vertices", total);
                    }

                    Id vertex = (Id) vertices.next().id();
                    this.addVertex(vertex);

                    consumers.provide(vertex);
                }
            } catch (Consumers.StopExecution e) {
                // pass
            } catch (Throwable e) {
                throw Consumers.wrapException(e);
            } finally {
                try {
                    consumers.await();
                } catch (Throwable e) {
                    throw Consumers.wrapException(e);
                } finally {
                    CloseableIterator.closeIterator(vertices);
                }
            }
            // Flush the last partial batch
            this.addEdgesByBatch();
            return total;
        }

        /**
         * Queue a vertex id, validating ordering and id type; flushes
         * the current batch when it reaches ADD_BATCH.
         */
        private void addVertex(Id vertex) {
            Id lastId = IdGenerator.ZERO;
            if (this.vertices.size() > 0) {
                lastId = this.vertices.get(this.vertices.size() - 1);
            }
            if (vertex.compareTo(lastId) < 0) {
                throw new HugeException("The ramtable feature is not " +
                                        "supported by %s backend",
                                        this.graph.backend());
            }
            if (!vertex.number()) {
                throw new HugeException("Only number id is supported " +
                                        "by ramtable, but got %s id '%s'",
                                        vertex.type().name().toLowerCase(),
                                        vertex);
            }

            if (this.vertices.size() >= ADD_BATCH) {
                this.addEdgesByBatch();
            }
            this.vertices.add(vertex);
        }

        /**
         * Append the edges of every queued vertex, in queue order,
         * busy-waiting (1ms sleep) for each vertex's edge list to be
         * produced by the consumer threads.
         */
        private void addEdgesByBatch() {
            int waitTimes = 0;
            for (Id vertex : this.vertices) {
                List<Edge> adjEdges = this.edges.remove(vertex);
                while (adjEdges == null) {
                    waitTimes++;
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException ignored) {
                        // pass
                        // NOTE(review): the interrupt status is not
                        // restored here — confirm whether cancellation
                        // should abort the load
                    }
                    adjEdges = this.edges.remove(vertex);
                }
                for (int i = 0; i < adjEdges.size(); i++) {
                    HugeEdge edge = (HugeEdge) adjEdges.get(i);
                    assert edge.id().ownerVertexId().equals(vertex);
                    addEdge(i == 0, edge);
                }
            }

            if (waitTimes > this.vertices.size()) {
                LOG.info("Loading wait times is {}", waitTimes);
            }

            this.vertices.clear();
        }
    }
}
a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/tx/GraphTransaction.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/tx/GraphTransaction.java index 545c387048..1b4d8c3ff3 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/tx/GraphTransaction.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/backend/tx/GraphTransaction.java @@ -27,7 +27,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.NoSuchElementException; import java.util.Set; import java.util.function.BiFunction; import java.util.function.Consumer; @@ -667,11 +666,11 @@ public Iterator queryVertices(Object... vertexIds) { public Vertex queryVertex(Object vertexId) { Iterator iter = this.queryVerticesByIds(new Object[]{vertexId}, false, true); - try { - return iter.next(); - } finally { - CloseableIterator.closeIterator(iter); + Vertex vertex = QueryResults.one(iter); + if (vertex == null) { + throw new NotFoundException("Vertex '%s' does not exist", vertexId); } + return vertex; } protected Iterator queryVerticesByIds(Object[] vertexIds, @@ -821,13 +820,11 @@ public Iterator queryEdges(Object... 
edgeIds) { public Edge queryEdge(Object edgeId) { Iterator iter = this.queryEdgesByIds(new Object[]{edgeId}, true); - try { - return iter.next(); - } catch (NoSuchElementException e) { + Edge edge = QueryResults.one(iter); + if (edge == null) { throw new NotFoundException("Edge '%s' does not exist", edgeId); - } finally { - CloseableIterator.closeIterator(iter); } + return edge; } protected Iterator queryEdgesByIds(Object[] edgeIds, @@ -841,8 +838,14 @@ protected Iterator queryEdgesByIds(Object[] edgeIds, IdQuery query = new IdQuery(HugeType.EDGE); for (Object edgeId : edgeIds) { HugeEdge edge; - Id id = HugeEdge.getIdValue(edgeId, !verifyId); - if (id == null || this.removedEdges.containsKey(id)) { + EdgeId id = HugeEdge.getIdValue(edgeId, !verifyId); + if (id == null) { + continue; + } + if (id.direction() == Directions.IN) { + id = id.switchDirection(); + } + if (this.removedEdges.containsKey(id)) { // The record has been deleted continue; } else if ((edge = this.addedEdges.get(id)) != null || diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/config/CoreOptions.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/config/CoreOptions.java index faa6427572..a89fb3c255 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/config/CoreOptions.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/config/CoreOptions.java @@ -309,6 +309,32 @@ public static synchronized CoreOptions instance() { 1000 ); + public static final ConfigOption QUERY_RAMTABLE_ENABLE = + new ConfigOption<>( + "query.ramtable_enable", + "Whether to enable ramtable for query of adjacent edges.", + disallowEmpty(), + false + ); + + public static final ConfigOption QUERY_RAMTABLE_VERTICES_CAPACITY = + new ConfigOption<>( + "query.ramtable_vertices_capacity", + "The maximum number of vertices in ramtable, " + + "generally the largest vertex id is used as capacity.", + rangeInt(1L, Integer.MAX_VALUE * 2L), + 10000000L + ); + + public static final ConfigOption 
QUERY_RAMTABLE_EDGES_CAPACITY = + new ConfigOption<>( + "query.ramtable_edges_capacity", + "The maximum number of edges in ramtable, " + + "include OUT and IN edges.", + rangeInt(1, Integer.MAX_VALUE), + 20000000 + ); + public static final ConfigOption VERTEX_TX_CAPACITY = new ConfigOption<>( "vertex.tx_capacity", diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/structure/HugeEdge.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/structure/HugeEdge.java index c46185b2ac..a03d519163 100644 --- a/hugegraph-core/src/main/java/com/baidu/hugegraph/structure/HugeEdge.java +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/structure/HugeEdge.java @@ -476,11 +476,11 @@ public String toString() { return StringFactory.edgeString(this); } - public static final Id getIdValue(Object idValue, - boolean returnNullIfError) { + public static final EdgeId getIdValue(Object idValue, + boolean returnNullIfError) { Id id = HugeElement.getIdValue(idValue); if (id == null || id instanceof EdgeId) { - return id; + return (EdgeId) id; } return EdgeId.parse(id.asString(), returnNullIfError); } diff --git a/hugegraph-core/src/main/java/com/baidu/hugegraph/util/Consumers.java b/hugegraph-core/src/main/java/com/baidu/hugegraph/util/Consumers.java new file mode 100644 index 0000000000..aedb547fbf --- /dev/null +++ b/hugegraph-core/src/main/java/com/baidu/hugegraph/util/Consumers.java @@ -0,0 +1,223 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.util; + +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import org.slf4j.Logger; + +import com.baidu.hugegraph.HugeException; +import com.baidu.hugegraph.task.TaskManager.ContextCallable; + +public class Consumers { + + public static final int CPUS = Runtime.getRuntime().availableProcessors(); + public static final int THREADS = 4 + CPUS / 4; + public static final int QUEUE_WORKER_SIZE = 1000; + + private static final Logger LOG = Log.logger(Consumers.class); + + private final ExecutorService executor; + private final Consumer consumer; + private final Runnable done; + + private final int workers; + private final int queueSize; + private final CountDownLatch latch; + private final BlockingQueue queue; + + private volatile boolean ending = false; + private volatile Throwable exception = null; + + public Consumers(ExecutorService executor, Consumer consumer) { + this(executor, consumer, null); + } + + public Consumers(ExecutorService executor, + Consumer consumer, Runnable done) { + this.executor = executor; + this.consumer = consumer; + this.done = done; + + int workers = THREADS; + if (this.executor instanceof ThreadPoolExecutor) { + workers = ((ThreadPoolExecutor) this.executor).getCorePoolSize(); + } + this.workers = workers; + this.queueSize = 
QUEUE_WORKER_SIZE * workers; + this.latch = new CountDownLatch(workers); + this.queue = new ArrayBlockingQueue<>(this.queueSize); + } + + public void start(String name) { + this.ending = false; + this.exception = null; + if (this.executor == null) { + return; + } + LOG.info("Starting {} workers[{}] with queue size {}...", + this.workers, name, this.queueSize); + for (int i = 0; i < this.workers; i++) { + this.executor.submit(new ContextCallable<>(this::runAndDone)); + } + } + + private Void runAndDone() { + try { + this.run(); + } catch (Throwable e) { + // Only the first exception of one thread can be stored + this.exception = e; + if (!(e instanceof StopExecution)) { + LOG.error("Error when running task", e); + } + } finally { + this.done(); + this.latch.countDown(); + } + return null; + } + + private void run() { + LOG.debug("Start to work..."); + while (!this.ending) { + this.consume(); + } + assert this.ending; + while (this.consume()); + + LOG.debug("Worker finished"); + } + + private boolean consume() { + V elem; + try { + elem = this.queue.poll(1, TimeUnit.SECONDS); + } catch (InterruptedException e) { + // ignore + return true; + } + if (elem == null) { + return false; + } + // do job + this.consumer.accept(elem); + return true; + } + + private void done() { + if (this.done == null) { + return; + } + + try { + this.done.run(); + } catch (Throwable e) { + if (this.exception == null) { + this.exception = e; + } else { + LOG.warn("Error while calling done()", e); + } + } + } + + private Throwable throwException() { + assert this.exception != null; + Throwable e = this.exception; + this.exception = null; + return e; + } + + public void provide(V v) throws Throwable { + if (this.executor == null) { + assert this.exception == null; + // do job directly if without thread pool + this.consumer.accept(v); + } else if (this.exception != null) { + throw this.throwException(); + } else { + try { + this.queue.put(v); + } catch (InterruptedException e) { + 
LOG.warn("Interrupted while enqueue", e); + } + } + } + + public void await() throws Throwable { + this.ending = true; + if (this.executor == null) { + // call done() directly if without thread pool + this.done(); + } else { + try { + this.latch.await(); + } catch (InterruptedException e) { + LOG.warn("Interrupted while waiting for consumers", e); + } + } + + if (this.exception != null) { + throw this.throwException(); + } + } + + public static ExecutorService newThreadPool(String prefix, int workers) { + if (workers == 0) { + return null; + } else { + if (workers < 0) { + assert workers == -1; + workers = Consumers.THREADS; + } else if (workers > Consumers.CPUS * 2) { + workers = Consumers.CPUS * 2; + } + String name = prefix + "-worker-%d"; + return ExecutorUtil.newFixedThreadPool(workers, name); + } + } + + public static RuntimeException wrapException(Throwable e) { + if (e instanceof RuntimeException) { + throw (RuntimeException) e; + } + throw new HugeException("Error when running task: %s", + HugeException.rootCause(e).getMessage(), e); + } + + public static class StopExecution extends HugeException { + + private static final long serialVersionUID = -371829356182454517L; + + public StopExecution(String message) { + super(message); + } + + public StopExecution(String message, Object... 
args) { + super(message, args); + } + } +} diff --git a/hugegraph-test/src/main/java/com/baidu/hugegraph/core/CoreTestSuite.java b/hugegraph-test/src/main/java/com/baidu/hugegraph/core/CoreTestSuite.java index 142065710c..9a6cb4b15c 100644 --- a/hugegraph-test/src/main/java/com/baidu/hugegraph/core/CoreTestSuite.java +++ b/hugegraph-test/src/main/java/com/baidu/hugegraph/core/CoreTestSuite.java @@ -48,7 +48,8 @@ RestoreCoreTest.class, TaskCoreTest.class, UsersTest.class, - MultiGraphsTest.class + MultiGraphsTest.class, + RamTableTest.class }) public class CoreTestSuite { diff --git a/hugegraph-test/src/main/java/com/baidu/hugegraph/core/RamTableTest.java b/hugegraph-test/src/main/java/com/baidu/hugegraph/core/RamTableTest.java new file mode 100644 index 0000000000..8a2432939f --- /dev/null +++ b/hugegraph-test/src/main/java/com/baidu/hugegraph/core/RamTableTest.java @@ -0,0 +1,837 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.core; + +import java.io.File; +import java.nio.file.Paths; +import java.util.Iterator; + +import org.apache.commons.io.FileUtils; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.Test; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.id.Id; +import com.baidu.hugegraph.backend.id.IdGenerator; +import com.baidu.hugegraph.backend.query.Query; +import com.baidu.hugegraph.backend.store.ram.RamTable; +import com.baidu.hugegraph.backend.tx.GraphTransaction; +import com.baidu.hugegraph.schema.SchemaManager; +import com.baidu.hugegraph.structure.HugeEdge; +import com.baidu.hugegraph.structure.HugeVertex; +import com.baidu.hugegraph.testutil.Assert; +import com.baidu.hugegraph.testutil.Whitebox; +import com.baidu.hugegraph.type.define.Directions; + +public class RamTableTest extends BaseCoreTest { + + private Object ramtable; + + @Override + @Before + public void setup() { + super.setup(); + + HugeGraph graph = this.graph(); + + Assume.assumeTrue("Ramtable is not supported by backend", + graph.backendStoreFeatures().supportsScanKeyPrefix()); + this.ramtable = Whitebox.getInternalState(graph, "ramtable"); + if (this.ramtable == null) { + Whitebox.setInternalState(graph, "ramtable", + new RamTable(graph, 2000, 1200)); + } + + graph.schema().vertexLabel("vl1").useCustomizeNumberId().create(); + graph.schema().vertexLabel("vl2").useCustomizeNumberId().create(); + graph.schema().edgeLabel("el1") + .sourceLabel("vl1") + .targetLabel("vl1") + .create(); + graph.schema().edgeLabel("el2") + .sourceLabel("vl2") + .targetLabel("vl2") + .create(); + } + + 
@Override + @After + public void teardown() throws Exception { + super.teardown(); + + File export = Paths.get(RamTable.EXPORT_PATH).toFile(); + if (export.exists()) { + FileUtils.forceDelete(export); + } + + HugeGraph graph = this.graph(); + Whitebox.setInternalState(graph, "ramtable", this.ramtable); + } + + @Test + public void testReloadAndQuery() throws Exception { + HugeGraph graph = this.graph(); + + // insert vertices and edges + for (int i = 0; i < 100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl1", T.id, i); + Vertex v2 = graph.addVertex(T.label, "vl1", T.id, i + 100); + v1.addEdge("el1", v2); + } + graph.tx().commit(); + + for (int i = 1000; i < 1100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl2", T.id, i); + Vertex v2 = graph.addVertex(T.label, "vl2", T.id, i + 100); + v1.addEdge("el2", v2); + } + graph.tx().commit(); + + // reload ramtable + Whitebox.invoke(graph.getClass(), "reloadRamtable", graph); + + // query edges + for (int i = 0; i < 100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + for (int i = 1000; i < 1100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + } + + @Test + public void testReloadFromFileAndQuery() throws Exception { + HugeGraph graph = this.graph(); + + // insert vertices and edges + for (int i = 0; i < 100; i++) { + Vertex v1 = graph.addVertex(T.label, 
"vl1", T.id, i); + Vertex v2 = graph.addVertex(T.label, "vl1", T.id, i + 100); + v1.addEdge("el1", v2); + } + graph.tx().commit(); + + for (int i = 1000; i < 1100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl2", T.id, i); + Vertex v2 = graph.addVertex(T.label, "vl2", T.id, i + 100); + v1.addEdge("el2", v2); + } + graph.tx().commit(); + + // reload ramtable + Whitebox.invoke(graph.getClass(), "reloadRamtable", graph); + + // query edges + for (int i = 0; i < 100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + for (int i = 1000; i < 1100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + + // reload ramtable from file + Whitebox.invoke(graph.getClass(), "reloadRamtable", graph, true); + + // query edges again + for (int i = 0; i < 100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + for (int i = 1000; i < 1100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) 
edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + } + + @Test + public void testReloadAndQueryWithMultiEdges() throws Exception { + HugeGraph graph = this.graph(); + + // insert vertices and edges + for (int i = 0; i < 100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl1", T.id, i); + Vertex v2 = graph.addVertex(T.label, "vl1", T.id, i + 100); + Vertex v3 = graph.addVertex(T.label, "vl1", T.id, i + 200); + v1.addEdge("el1", v2); + v1.addEdge("el1", v3); + v3.addEdge("el1", v1); + } + graph.tx().commit(); + + for (int i = 1000; i < 1100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl2", T.id, i); + Vertex v2 = graph.addVertex(T.label, "vl2", T.id, i + 100); + Vertex v3 = graph.addVertex(T.label, "vl2", T.id, i + 200); + v1.addEdge("el2", v2); + v1.addEdge("el2", v3); + v2.addEdge("el2", v3); + } + graph.tx().commit(); + + // reload ramtable + Whitebox.invoke(graph.getClass(), "reloadRamtable", graph); + + // query edges by OUT + for (int i = 0; i < 100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 200, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query edges by BOTH + for (int i = 0; i < 100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.BOTH, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) 
edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 200, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 200, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.IN, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query edges by IN + for (int i = 0; i < 100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.IN, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 200, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.IN, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + + // query edges by OUT + for (int i = 1000; i < 1100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 200, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query edges by BOTH + for (int i = 1000; i < 1100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.BOTH, null); + 
Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 100, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(i + 200, edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query edges by IN + for (int i = 1000; i < 1100; i++) { + Iterator edges = this.edgesOfVertex(IdGenerator.of(i), + Directions.IN, null); + Assert.assertFalse(edges.hasNext()); + } + } + + @Test + public void testReloadAndQueryWithBigVertex() throws Exception { + HugeGraph graph = this.graph(); + + // only enable this test when ram > 20G + boolean enableBigRamTest = false; + long big1 = 2400000000L; + long big2 = 4200000000L; + if (!enableBigRamTest) { + big1 = 100L; + big2 = 1000L; + } + + // insert vertices and edges + for (int i = 0; i < 100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl1", T.id, i + big1); + Vertex v2 = graph.addVertex(T.label, "vl1", T.id, i + big1 + 100); + v1.addEdge("el1", v2); + } + graph.tx().commit(); + + for (int i = 0; i < 100; i++) { + Vertex v1 = graph.addVertex(T.label, "vl2", T.id, i + big2); + Vertex v2 = graph.addVertex(T.label, "vl2", T.id, i + big2); + v1.addEdge("el2", v2); + } + graph.tx().commit(); + + // reload ramtable + Whitebox.invoke(graph.getClass(), "reloadRamtable", graph); + + // query edges + for (int i = 0; i < 100; i++) { + long source = i + big1; + Iterator edges = this.edgesOfVertex(IdGenerator.of(source), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(source, + edge.id().ownerVertexId().asLong()); + Assert.assertEquals(i + big1 + 100, + edge.id().otherVertexId().asLong()); + 
Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el1", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + for (int i = 0; i < 100; i++) { + long source = i + big2; + Iterator edges = this.edgesOfVertex(IdGenerator.of(source), + Directions.OUT, null); + Assert.assertTrue(edges.hasNext()); + HugeEdge edge = (HugeEdge) edges.next(); + Assert.assertEquals(source, + edge.id().ownerVertexId().asLong()); + Assert.assertEquals(i + big2, + edge.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge.direction()); + Assert.assertEquals("el2", edge.label()); + + Assert.assertFalse(edges.hasNext()); + } + } + + @Test + public void testReloadAndQueryWithProperty() throws Exception { + HugeGraph graph = this.graph(); + SchemaManager schema = graph.schema(); + + schema.propertyKey("name") + .asText() + .create(); + schema.vertexLabel("person") + .properties("name") + .useCustomizeNumberId() + .create(); + schema.edgeLabel("next") + .sourceLabel("person") + .targetLabel("person") + .properties("name") + .create(); + + GraphTraversalSource g = graph.traversal(); + g.addV("person").property(T.id, 1).property("name", "A").as("a") + .addV("person").property(T.id, 2).property("name", "B").as("b") + .addV("person").property(T.id, 3).property("name", "C").as("c") + .addV("person").property(T.id, 4).property("name", "D").as("d") + .addV("person").property(T.id, 5).property("name", "E").as("e") + .addV("person").property(T.id, 6).property("name", "F").as("f") + .addE("next").from("a").to("b").property("name", "ab") + .addE("next").from("b").to("c").property("name", "bc") + .addE("next").from("b").to("d").property("name", "bd") + .addE("next").from("c").to("d").property("name", "cd") + .addE("next").from("c").to("e").property("name", "ce") + .addE("next").from("d").to("e").property("name", "de") + .addE("next").from("e").to("f").property("name", "ef") + .addE("next").from("f").to("d").property("name", "fd") + .iterate(); + 
graph.tx().commit(); + + Object ramtable = Whitebox.getInternalState(graph, "ramtable"); + Assert.assertNotNull("The ramtable is not enabled", ramtable); + + // reload ramtable + Whitebox.invoke(graph.getClass(), "reloadRamtable", graph); + + GraphTraversal vertices; + HugeVertex vertex; + GraphTraversal edges; + HugeEdge edge; + + // A + vertices = g.V(1).out(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertFalse(vertex.propLoaded()); + Assert.assertEquals(2L, vertex.id().asObject()); + Assert.assertEquals("B", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(1).outE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertFalse(edge.propLoaded()); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("ab", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(1).in(); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(1).inE(); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(1).both(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(2L, vertex.id().asObject()); + Assert.assertEquals("B", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(1).bothE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("ab", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + // B + vertices = g.V(2).out(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(3L, vertex.id().asObject()); + Assert.assertEquals("C", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + 
Assert.assertFalse(vertices.hasNext()); + + edges = g.V(2).outE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("bc", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("bd", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(2).in(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(1L, vertex.id().asObject()); + Assert.assertEquals("A", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(2).inE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("ab", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(2).both(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(3L, vertex.id().asObject()); + Assert.assertEquals("C", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(1L, vertex.id().asObject()); + Assert.assertEquals("A", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(2).bothE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("bc", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("bd", edge.value("name")); + 
Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("ab", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + // C + vertices = g.V(3).out(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(5L, vertex.id().asObject()); + Assert.assertEquals("E", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(3).outE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("cd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("ce", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(3).in(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(2L, vertex.id().asObject()); + Assert.assertEquals("B", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(3).inE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("bc", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(3).both(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(5L, vertex.id().asObject()); + Assert.assertEquals("E", vertex.value("name")); + 
Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(2L, vertex.id().asObject()); + Assert.assertEquals("B", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(3).bothE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("cd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("ce", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("bc", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + // D + vertices = g.V(4).out(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(5L, vertex.id().asObject()); + Assert.assertEquals("E", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(4).outE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("de", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(4).in(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(2L, vertex.id().asObject()); + Assert.assertEquals("B", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(3L, vertex.id().asObject()); + Assert.assertEquals("C", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(6L, vertex.id().asObject()); + Assert.assertEquals("F", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(4).inE(); + 
Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("bd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("cd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("fd", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(4).both(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(5L, vertex.id().asObject()); + Assert.assertEquals("E", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(2L, vertex.id().asObject()); + Assert.assertEquals("B", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(3L, vertex.id().asObject()); + Assert.assertEquals("C", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(6L, vertex.id().asObject()); + Assert.assertEquals("F", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(4).bothE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("de", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("bd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("cd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); 
+ edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("fd", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + // E + vertices = g.V(5).out(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(6L, vertex.id().asObject()); + Assert.assertEquals("F", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(5).outE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("ef", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(5).in(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(3L, vertex.id().asObject()); + Assert.assertEquals("C", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(5).inE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("ce", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("de", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(5).both(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(6L, vertex.id().asObject()); + Assert.assertEquals("F", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(3L, vertex.id().asObject()); + Assert.assertEquals("C", vertex.value("name")); + 
Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(5).bothE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("ef", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("ce", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("de", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + // F + vertices = g.V(6).out(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(6).outE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("fd", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(6).in(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(5L, vertex.id().asObject()); + Assert.assertEquals("E", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(6).inE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("ef", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + + vertices = g.V(6).both(); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(4L, 
vertex.id().asObject()); + Assert.assertEquals("D", vertex.value("name")); + Assert.assertTrue(vertices.hasNext()); + vertex = (HugeVertex) vertices.next(); + Assert.assertEquals(5L, vertex.id().asObject()); + Assert.assertEquals("E", vertex.value("name")); + Assert.assertFalse(vertices.hasNext()); + + edges = g.V(6).bothE(); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.OUT, edge.id().direction()); + Assert.assertEquals("fd", edge.value("name")); + Assert.assertTrue(edges.hasNext()); + edge = (HugeEdge) edges.next(); + Assert.assertEquals(Directions.IN, edge.id().direction()); + Assert.assertEquals("ef", edge.value("name")); + Assert.assertFalse(edges.hasNext()); + } + + private Iterator edgesOfVertex(Id source, Directions dir, Id label) { + Id[] labels = {}; + if (label != null) { + labels = new Id[]{label}; + } + + Query query = GraphTransaction.constructEdgesQuery(source, dir, labels); + return this.graph().edges(query); + } +} diff --git a/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/UnitTestSuite.java b/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/UnitTestSuite.java index 44be81ec24..c6ec329bb5 100644 --- a/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/UnitTestSuite.java +++ b/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/UnitTestSuite.java @@ -26,6 +26,7 @@ import com.baidu.hugegraph.unit.cache.CacheTest; import com.baidu.hugegraph.unit.cache.CachedGraphTransactionTest; import com.baidu.hugegraph.unit.cache.CachedSchemaTransactionTest; +import com.baidu.hugegraph.unit.cache.RamTableTest; import com.baidu.hugegraph.unit.cassandra.CassandraTest; import com.baidu.hugegraph.unit.core.AnalyzerTest; import com.baidu.hugegraph.unit.core.BackendMutationTest; @@ -49,12 +50,12 @@ import com.baidu.hugegraph.unit.mysql.WhereBuilderTest; import com.baidu.hugegraph.unit.rocksdb.RocksDBCountersTest; import com.baidu.hugegraph.unit.rocksdb.RocksDBSessionsTest; -import 
com.baidu.hugegraph.unit.serializer.StoreSerializerTest; import com.baidu.hugegraph.unit.serializer.BinaryBackendEntryTest; import com.baidu.hugegraph.unit.serializer.BinaryScatterSerializerTest; import com.baidu.hugegraph.unit.serializer.BinarySerializerTest; import com.baidu.hugegraph.unit.serializer.BytesBufferTest; import com.baidu.hugegraph.unit.serializer.SerializerFactoryTest; +import com.baidu.hugegraph.unit.serializer.StoreSerializerTest; import com.baidu.hugegraph.unit.serializer.TableBackendEntryTest; import com.baidu.hugegraph.unit.serializer.TextBackendEntryTest; import com.baidu.hugegraph.unit.util.JsonUtilTest; @@ -70,6 +71,7 @@ CachedSchemaTransactionTest.class, CachedGraphTransactionTest.class, CacheManagerTest.class, + RamTableTest.class, /* types */ DataTypeTest.class, diff --git a/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/CachedSchemaTransactionTest.java b/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/CachedSchemaTransactionTest.java index b29c95c0a4..82884b2cbc 100644 --- a/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/CachedSchemaTransactionTest.java +++ b/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/CachedSchemaTransactionTest.java @@ -174,28 +174,33 @@ public void testGetSchema() throws Exception { public void testResetCachedAllIfReachedCapacity() throws Exception { CachedSchemaTransaction cache = this.cache(); + Object old = Whitebox.getInternalState(cache, "idCache.capacity"); Whitebox.setInternalState(cache, "idCache.capacity", 2); - Assert.assertEquals(0L, Whitebox.invoke(cache, "idCache", "size")); - - FakeObjects objects = new FakeObjects("unit-test"); - cache.addPropertyKey(objects.newPropertyKey(IdGenerator.of(1), - "fake-pk-1")); - Assert.assertEquals(1L, Whitebox.invoke(cache, "idCache", "size")); - Assert.assertEquals(1, cache.getPropertyKeys().size()); - Whitebox.invoke(CachedSchemaTransaction.class, "cachedTypes", cache); - 
Assert.assertEquals(ImmutableMap.of(HugeType.PROPERTY_KEY, true), - Whitebox.invoke(CachedSchemaTransaction.class, - "cachedTypes", cache)); - - cache.addPropertyKey(objects.newPropertyKey(IdGenerator.of(3), - "fake-pk-2")); - cache.addPropertyKey(objects.newPropertyKey(IdGenerator.of(2), - "fake-pk-3")); - - Assert.assertEquals(2L, Whitebox.invoke(cache, "idCache", "size")); - Assert.assertEquals(3, cache.getPropertyKeys().size()); - Assert.assertEquals(ImmutableMap.of(), - Whitebox.invoke(CachedSchemaTransaction.class, - "cachedTypes", cache)); + try { + Assert.assertEquals(0L, Whitebox.invoke(cache, "idCache", "size")); + + FakeObjects objects = new FakeObjects("unit-test"); + cache.addPropertyKey(objects.newPropertyKey(IdGenerator.of(1), + "fake-pk-1")); + Assert.assertEquals(1L, Whitebox.invoke(cache, "idCache", "size")); + Assert.assertEquals(1, cache.getPropertyKeys().size()); + Whitebox.invoke(CachedSchemaTransaction.class, "cachedTypes", cache); + Assert.assertEquals(ImmutableMap.of(HugeType.PROPERTY_KEY, true), + Whitebox.invoke(CachedSchemaTransaction.class, + "cachedTypes", cache)); + + cache.addPropertyKey(objects.newPropertyKey(IdGenerator.of(3), + "fake-pk-2")); + cache.addPropertyKey(objects.newPropertyKey(IdGenerator.of(2), + "fake-pk-3")); + + Assert.assertEquals(2L, Whitebox.invoke(cache, "idCache", "size")); + Assert.assertEquals(3, cache.getPropertyKeys().size()); + Assert.assertEquals(ImmutableMap.of(), + Whitebox.invoke(CachedSchemaTransaction.class, + "cachedTypes", cache)); + } finally { + Whitebox.setInternalState(cache, "idCache.capacity", old); + } } } diff --git a/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/RamTableTest.java b/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/RamTableTest.java new file mode 100644 index 0000000000..14e3da4f98 --- /dev/null +++ b/hugegraph-test/src/main/java/com/baidu/hugegraph/unit/cache/RamTableTest.java @@ -0,0 +1,222 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.unit.cache; + +import java.util.Iterator; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.baidu.hugegraph.HugeFactory; +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.store.ram.RamTable; +import com.baidu.hugegraph.structure.HugeEdge; +import com.baidu.hugegraph.testutil.Assert; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.unit.FakeObjects; + +public class RamTableTest { + + // max value is 4 billion + private static final int VERTEX_SIZE = 10000000; + private static final int EDGE_SIZE = 20000000; + + private HugeGraph graph; + + @Before + public void setup() { + this.graph = HugeFactory.open(FakeObjects.newConfig()); + this.graph.schema().vertexLabel("vl1").create(); + this.graph.schema().vertexLabel("vl2").create(); + this.graph.schema().edgeLabel("el1") + .sourceLabel("vl1") + .targetLabel("vl1") + .create(); + this.graph.schema().edgeLabel("el2") + .sourceLabel("vl2") + .targetLabel("vl2") + .create(); + } + + @After + public void teardown() throws Exception { + this.graph.close(); + } + + private HugeGraph graph() { + return this.graph; 
+ } + + @Test + public void testAddAndQuery() throws Exception { + HugeGraph graph = this.graph(); + int el1 = (int) graph.edgeLabel("el1").id().asLong(); + int el2 = (int) graph.edgeLabel("el2").id().asLong(); + + RamTable table = new RamTable(graph, VERTEX_SIZE, EDGE_SIZE); + long oldSize = table.edgesSize(); + // insert edges + for (int i = 0; i < VERTEX_SIZE; i++) { + table.addEdge(true, i, i, Directions.OUT, el1); + Assert.assertEquals(oldSize + 2 * i + 1, table.edgesSize()); + + table.addEdge(false, i, i + 1, Directions.IN, el2); + Assert.assertEquals(oldSize + 2 * i + 2, table.edgesSize()); + } + + // query by BOTH + for (int i = 0; i < VERTEX_SIZE; i++) { + Iterator edges = table.query(i, Directions.BOTH, 0); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge1 = edges.next(); + Assert.assertEquals(i, edge1.id().ownerVertexId().asLong()); + Assert.assertEquals(i, edge1.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge1.direction()); + Assert.assertEquals("el1", edge1.label()); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge2 = edges.next(); + Assert.assertEquals(i, edge2.id().ownerVertexId().asLong()); + Assert.assertEquals(i + 1L, edge2.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.IN, edge2.direction()); + Assert.assertEquals("el2", edge2.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query by OUT + for (int i = 0; i < VERTEX_SIZE; i++) { + Iterator edges = table.query(i, Directions.OUT, el1); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge1 = edges.next(); + Assert.assertEquals(i, edge1.id().ownerVertexId().asLong()); + Assert.assertEquals(i, edge1.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge1.direction()); + Assert.assertEquals("el1", edge1.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query by IN + for (int i = 0; i < VERTEX_SIZE; i++) { + Iterator edges = table.query(i, Directions.IN, el2); + + Assert.assertTrue(edges.hasNext()); + 
HugeEdge edge1 = edges.next(); + Assert.assertEquals(i, edge1.id().ownerVertexId().asLong()); + Assert.assertEquals(i + 1L, edge1.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.IN, edge1.direction()); + Assert.assertEquals("el2", edge1.label()); + + Assert.assertFalse(edges.hasNext()); + } + + // query by BOTH & label 1 + for (int i = 0; i < VERTEX_SIZE; i++) { + Iterator edges = table.query(i, Directions.BOTH, el1); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge1 = edges.next(); + Assert.assertEquals(i, edge1.id().ownerVertexId().asLong()); + Assert.assertEquals(i, edge1.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge1.direction()); + Assert.assertEquals("el1", edge1.label()); + + Assert.assertFalse(edges.hasNext()); + } + // query by BOTH & label 2 + for (int i = 0; i < VERTEX_SIZE; i++) { + Iterator edges = table.query(i, Directions.BOTH, el2); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge1 = edges.next(); + Assert.assertEquals(i, edge1.id().ownerVertexId().asLong()); + Assert.assertEquals(i + 1L, edge1.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.IN, edge1.direction()); + Assert.assertEquals("el2", edge1.label()); + + Assert.assertFalse(edges.hasNext()); + } + + // query non-exist vertex + Iterator edges = table.query(VERTEX_SIZE, Directions.BOTH, 0); + Assert.assertFalse(edges.hasNext()); + } + + @Test + public void testAddAndQueryWithoutAdjEdges() throws Exception { + HugeGraph graph = this.graph(); + int el1 = (int) graph.edgeLabel("el1").id().asLong(); + int el2 = (int) graph.edgeLabel("el2").id().asLong(); + + RamTable table = new RamTable(graph, VERTEX_SIZE, EDGE_SIZE); + long oldSize = table.edgesSize(); + // insert edges + for (int i = 0; i < VERTEX_SIZE; i++) { + if (i % 3 != 0) { + // don't insert edges for 2/3 vertices + continue; + } + + table.addEdge(true, i, i, Directions.OUT, el1); + Assert.assertEquals(oldSize + i + 1, table.edgesSize()); + + table.addEdge(false, 
i, i, Directions.OUT, el2); + Assert.assertEquals(oldSize + i + 2, table.edgesSize()); + + table.addEdge(false, i, i + 1, Directions.IN, el2); + Assert.assertEquals(oldSize + i + 3, table.edgesSize()); + } + + // query by BOTH + for (int i = 0; i < VERTEX_SIZE; i++) { + Iterator edges = table.query(i, Directions.BOTH, 0); + + if (i % 3 != 0) { + Assert.assertFalse(edges.hasNext()); + continue; + } + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge1 = edges.next(); + Assert.assertEquals(i, edge1.id().ownerVertexId().asLong()); + Assert.assertEquals(i, edge1.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge1.direction()); + Assert.assertEquals("el1", edge1.label()); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge2 = edges.next(); + Assert.assertEquals(i, edge2.id().ownerVertexId().asLong()); + Assert.assertEquals(i, edge2.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.OUT, edge2.direction()); + Assert.assertEquals("el2", edge2.label()); + + Assert.assertTrue(edges.hasNext()); + HugeEdge edge3 = edges.next(); + Assert.assertEquals(i, edge3.id().ownerVertexId().asLong()); + Assert.assertEquals(i + 1L, edge3.id().otherVertexId().asLong()); + Assert.assertEquals(Directions.IN, edge3.direction()); + Assert.assertEquals("el2", edge3.label()); + + Assert.assertFalse(edges.hasNext()); + } + } +}