diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeFactory.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeFactory.java
index 8c2c09faa5..a1d03cf0dc 100644
--- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeFactory.java
+++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeFactory.java
@@ -145,7 +145,8 @@ public static void shutdown(long timeout) {

     /**
      * Stop all the daemon threads
-     * @param timeout wait in seconds
+     *
+     * @param timeout wait in seconds
      * @param ignoreException don't throw exception if true
      */
     public static void shutdown(long timeout, boolean ignoreException) {
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraph.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraph.java
index 4566bf2cbe..ab460d495f 100644
--- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraph.java
+++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraph.java
@@ -249,7 +249,7 @@ public interface HugeGraph extends Graph {
     V option(TypedOption option);

     void registerRpcServices(RpcServiceConfig4Server serverConfig,
-                              RpcServiceConfig4Client clientConfig);
+                             RpcServiceConfig4Client clientConfig);

     default List mapPkId2Name(Collection ids) {
         List names = new ArrayList<>(ids.size());
@@ -316,8 +316,8 @@ default Id[] mapVlName2Id(String[] vertexLabels) {
     static void registerTraversalStrategies(Class clazz) {
         TraversalStrategies strategies = TraversalStrategies.GlobalCache
-                                         .getStrategies(Graph.class)
-                                         .clone();
+                                                            .getStrategies(Graph.class)
+                                                            .clone();
         strategies.addStrategies(HugeVertexStepStrategy.instance(),
                                  HugeGraphStepStrategy.instance(),
                                  HugeCountStepStrategy.instance(),
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java
index 6ef55d0181..ec50e004cb 100644
--- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java
+++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java
@@ -17,20 +17,21 @@ package org.apache.hugegraph;

+import org.apache.hugegraph.analyzer.Analyzer;
 import org.apache.hugegraph.backend.LocalCounter;
+import org.apache.hugegraph.backend.serializer.AbstractSerializer;
 import org.apache.hugegraph.backend.store.BackendFeatures;
 import org.apache.hugegraph.backend.store.BackendStore;
 import org.apache.hugegraph.backend.store.ram.RamTable;
 import org.apache.hugegraph.backend.tx.GraphTransaction;
 import org.apache.hugegraph.backend.tx.SchemaTransaction;
+import org.apache.hugegraph.config.HugeConfig;
+import org.apache.hugegraph.event.EventHub;
 import org.apache.hugegraph.job.EphemeralJob;
 import org.apache.hugegraph.task.ServerInfoManager;
 import org.apache.hugegraph.type.define.GraphMode;
 import org.apache.hugegraph.type.define.GraphReadMode;
-import org.apache.hugegraph.analyzer.Analyzer;
-import org.apache.hugegraph.backend.serializer.AbstractSerializer;
-import org.apache.hugegraph.config.HugeConfig;
-import org.apache.hugegraph.event.EventHub;
+
 import com.google.common.util.concurrent.RateLimiter;

 /**
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java
index 4ce843787f..05a44a4a1c 100644
---
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java @@ -121,26 +121,26 @@ public class StandardHugeGraph implements HugeGraph { public static final Class[] PROTECT_CLASSES = { - StandardHugeGraph.class, - StandardHugeGraph.StandardHugeGraphParams.class, - TinkerPopTransaction.class, - StandardHugeGraph.Txs.class, - StandardHugeGraph.SysTransaction.class + StandardHugeGraph.class, + StandardHugeGraph.StandardHugeGraphParams.class, + TinkerPopTransaction.class, + StandardHugeGraph.Txs.class, + StandardHugeGraph.SysTransaction.class }; public static final Set> ALLOWED_CONFIGS = ImmutableSet.of( - CoreOptions.TASK_WAIT_TIMEOUT, - CoreOptions.TASK_SYNC_DELETION, - CoreOptions.TASK_TTL_DELETE_BATCH, - CoreOptions.TASK_INPUT_SIZE_LIMIT, - CoreOptions.TASK_RESULT_SIZE_LIMIT, - CoreOptions.OLTP_CONCURRENT_THREADS, - CoreOptions.OLTP_CONCURRENT_DEPTH, - CoreOptions.OLTP_COLLECTION_TYPE, - CoreOptions.VERTEX_DEFAULT_LABEL, - CoreOptions.VERTEX_ENCODE_PK_NUMBER, - CoreOptions.STORE_GRAPH, - CoreOptions.STORE + CoreOptions.TASK_WAIT_TIMEOUT, + CoreOptions.TASK_SYNC_DELETION, + CoreOptions.TASK_TTL_DELETE_BATCH, + CoreOptions.TASK_INPUT_SIZE_LIMIT, + CoreOptions.TASK_RESULT_SIZE_LIMIT, + CoreOptions.OLTP_CONCURRENT_THREADS, + CoreOptions.OLTP_CONCURRENT_DEPTH, + CoreOptions.OLTP_COLLECTION_TYPE, + CoreOptions.VERTEX_DEFAULT_LABEL, + CoreOptions.VERTEX_ENCODE_PK_NUMBER, + CoreOptions.STORE_GRAPH, + CoreOptions.STORE ); private static final Logger LOG = Log.logger(StandardHugeGraph.class); @@ -294,14 +294,17 @@ public void serverStarted(GlobalMasterInfo nodeInfo) { private void initRoleStateMachine(Id serverId) { HugeConfig conf = this.configuration; Config roleConfig = new RoleElectionConfig(serverId.toString(), - conf.get(RoleElectionOptions.NODE_EXTERNAL_URL), - conf.get(RoleElectionOptions.EXCEEDS_FAIL_COUNT), - conf.get(RoleElectionOptions.RANDOM_TIMEOUT_MILLISECOND), - conf.get(RoleElectionOptions.HEARTBEAT_INTERVAL_SECOND), - conf.get(RoleElectionOptions.MASTER_DEAD_TIMES), - conf.get(RoleElectionOptions.BASE_TIMEOUT_MILLISECOND)); + conf.get(RoleElectionOptions.NODE_EXTERNAL_URL), + conf.get(RoleElectionOptions.EXCEEDS_FAIL_COUNT), + conf.get( + RoleElectionOptions.RANDOM_TIMEOUT_MILLISECOND), + conf.get( + RoleElectionOptions.HEARTBEAT_INTERVAL_SECOND), + conf.get(RoleElectionOptions.MASTER_DEAD_TIMES), + conf.get( + RoleElectionOptions.BASE_TIMEOUT_MILLISECOND)); ClusterRoleStore roleStore = new StandardClusterRoleStore(this.params); - this.roleElectionStateMachine = new StandardRoleElectionStateMachine(roleConfig, + this.roleElectionStateMachine = new StandardRoleElectionStateMachine(roleConfig, roleStore); } @@ -572,7 +575,7 @@ protected void reloadRamtable(boolean loadFromFile) { @Override public C compute(Class clazz) - throws IllegalArgumentException { + throws IllegalArgumentException { throw Graph.Exceptions.graphComputerNotSupported(); } @@ -581,11 +584,12 @@ public GraphComputer compute() throws IllegalArgumentException { throw Graph.Exceptions.graphComputerNotSupported(); } - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) @Override public I io(final Io.Builder builder) { return (I) builder.graph(this).onMapper(mapper -> - mapper.addRegistry(HugeGraphIoRegistry.instance()) + mapper.addRegistry( + HugeGraphIoRegistry.instance()) ).create(); } @@ -1135,12 +1139,12 @@ public void 
registerRpcServices(RpcServiceConfig4Server serverConfig, // The proxy is sometimes unavailable (issue #664) CacheNotifier proxy = clientConfig.serviceProxy(this.name, clazz1); serverConfig.addService(this.name, clazz1, new HugeGraphCacheNotifier( - this.graphEventHub, proxy)); + this.graphEventHub, proxy)); Class clazz2 = SchemaCacheNotifier.class; proxy = clientConfig.serviceProxy(this.name, clazz2); serverConfig.addService(this.name, clazz2, new HugeSchemaCacheNotifier( - this.schemaEventHub, proxy)); + this.schemaEventHub, proxy)); } private void closeTx() { @@ -1493,7 +1497,7 @@ private Txs getOrNewTransaction() { private void destroyTransaction() { if (this.isOpen()) { throw new HugeException( - "Transaction should be closed before destroying"); + "Transaction should be closed before destroying"); } // Do close if needed, then remove the reference @@ -1634,8 +1638,8 @@ public void reload() { } private static class HugeSchemaCacheNotifier - extends AbstractCacheNotifier - implements SchemaCacheNotifier { + extends AbstractCacheNotifier + implements SchemaCacheNotifier { public HugeSchemaCacheNotifier(EventHub hub, CacheNotifier proxy) { super(hub, proxy); @@ -1643,8 +1647,8 @@ public HugeSchemaCacheNotifier(EventHub hub, CacheNotifier proxy) { } private static class HugeGraphCacheNotifier - extends AbstractCacheNotifier - implements GraphCacheNotifier { + extends AbstractCacheNotifier + implements GraphCacheNotifier { public HugeGraphCacheNotifier(EventHub hub, CacheNotifier proxy) { super(hub, proxy); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnalyzerFactory.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnalyzerFactory.java index 463b962278..21dd157b11 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnalyzerFactory.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnalyzerFactory.java @@ -64,12 +64,12 @@ private static Analyzer customizedAnalyzer(String name, String mode) { return clazz.getConstructor(String.class).newInstance(mode); } catch (Exception e) { throw new HugeException( - "Failed to construct analyzer '%s' with mode '%s'", - e, name, mode); + "Failed to construct analyzer '%s' with mode '%s'", + e, name, mode); } } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) public static void register(String name, String classPath) { ClassLoader classLoader = SerializerFactory.class.getClassLoader(); Class clazz; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java index f32303518f..2f857cd171 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java @@ -26,9 +26,9 @@ import org.ansj.splitWord.analysis.IndexAnalysis; import org.ansj.splitWord.analysis.NlpAnalysis; import org.ansj.splitWord.analysis.ToAnalysis; - import org.apache.hugegraph.config.ConfigException; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.google.common.collect.ImmutableList; /** @@ -48,8 +48,8 @@ public class AnsjAnalyzer implements Analyzer { public AnsjAnalyzer(String mode) { if (!SUPPORT_MODES.contains(mode)) { throw new ConfigException( - "Unsupported segment mode '%s' for ansj analyzer, " + 
- "the available values are %s", mode, SUPPORT_MODES); + "Unsupported segment mode '%s' for ansj analyzer, " + + "the available values are %s", mode, SUPPORT_MODES); } this.analysis = mode; } @@ -72,7 +72,7 @@ public Set segment(String text) { break; default: throw new AssertionError(String.format( - "Unsupported segment mode '%s'", this.analysis)); + "Unsupported segment mode '%s'", this.analysis)); } assert terms != null; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/HanLPAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/HanLPAnalyzer.java index baf6c6884a..8b5fa6aa0a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/HanLPAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/HanLPAnalyzer.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.config.ConfigException; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.google.common.collect.ImmutableList; import com.hankcs.hanlp.seg.Dijkstra.DijkstraSegment; import com.hankcs.hanlp.seg.NShort.NShortSegment; @@ -38,14 +39,14 @@ public class HanLPAnalyzer implements Analyzer { public static final List SUPPORT_MODES = - ImmutableList.builder() - .add("standard") - .add("nlp") - .add("index") - .add("nShort") - .add("shortest") - .add("speed") - .build(); + ImmutableList.builder() + .add("standard") + .add("nlp") + .add("index") + .add("nShort") + .add("shortest") + .add("speed") + .build(); private static final Segment N_SHORT_SEGMENT = new NShortSegment().enableCustomDictionary(false) @@ -61,8 +62,8 @@ public class HanLPAnalyzer implements Analyzer { public HanLPAnalyzer(String mode) { if (!SUPPORT_MODES.contains(mode)) { throw new ConfigException( - "Unsupported segment mode '%s' for hanlp analyzer, " + - "the available values are %s", mode, SUPPORT_MODES); + "Unsupported segment mode '%s' for hanlp analyzer, " + + "the available values are %s", mode, SUPPORT_MODES); } this.tokenizer = mode; } @@ -91,7 +92,7 @@ public Set segment(String text) { break; default: throw new AssertionError(String.format( - "Unsupported segment mode '%s'", this.tokenizer)); + "Unsupported segment mode '%s'", this.tokenizer)); } assert terms != null; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/IKAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/IKAnalyzer.java index 2c25c2f41b..ab564f396f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/IKAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/IKAnalyzer.java @@ -22,11 +22,11 @@ import java.util.Set; import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.config.ConfigException; +import org.apache.hugegraph.util.InsertionOrderUtil; import org.wltea.analyzer.core.IKSegmenter; import org.wltea.analyzer.core.Lexeme; -import org.apache.hugegraph.config.ConfigException; -import org.apache.hugegraph.util.InsertionOrderUtil; import com.google.common.collect.ImmutableList; /** @@ -44,8 +44,8 @@ public class IKAnalyzer implements Analyzer { public IKAnalyzer(String mode) { if (!SUPPORT_MODES.contains(mode)) { throw new ConfigException( - "Unsupported segment mode '%s' for ikanalyzer, " + - "the available values are %s", mode, SUPPORT_MODES); + "Unsupported segment mode '%s' for ikanalyzer, " + + "the available values are %s", mode, SUPPORT_MODES); } this.smartSegMode = 
SUPPORT_MODES.get(0).equals(mode); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JcsegAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JcsegAnalyzer.java index ad00ea4769..3740e87b19 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JcsegAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JcsegAnalyzer.java @@ -22,14 +22,14 @@ import java.util.Set; import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.config.ConfigException; +import org.apache.hugegraph.util.InsertionOrderUtil; import org.lionsoul.jcseg.ISegment; import org.lionsoul.jcseg.IWord; import org.lionsoul.jcseg.dic.ADictionary; import org.lionsoul.jcseg.dic.DictionaryFactory; import org.lionsoul.jcseg.segmenter.SegmenterConfig; -import org.apache.hugegraph.config.ConfigException; -import org.apache.hugegraph.util.InsertionOrderUtil; import com.google.common.collect.ImmutableList; /** @@ -50,8 +50,8 @@ public class JcsegAnalyzer implements Analyzer { public JcsegAnalyzer(String mode) { if (!SUPPORT_MODES.contains(mode)) { throw new ConfigException( - "Unsupported segment mode '%s' for jcseg analyzer, " + - "the available values are %s", mode, SUPPORT_MODES); + "Unsupported segment mode '%s' for jcseg analyzer, " + + "the available values are %s", mode, SUPPORT_MODES); } if ("Simple".equals(mode)) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JiebaAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JiebaAnalyzer.java index b79973c9b4..7c280a50a1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JiebaAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/JiebaAnalyzer.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.config.ConfigException; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.google.common.collect.ImmutableList; import com.huaban.analysis.jieba.JiebaSegmenter; import com.huaban.analysis.jieba.SegToken; @@ -43,8 +44,8 @@ public class JiebaAnalyzer implements Analyzer { public JiebaAnalyzer(String mode) { if (!SUPPORT_MODES.contains(mode)) { throw new ConfigException( - "Unsupported segment mode '%s' for jieba analyzer, " + - "the available values are %s", mode, SUPPORT_MODES); + "Unsupported segment mode '%s' for jieba analyzer, " + + "the available values are %s", mode, SUPPORT_MODES); } this.segMode = JiebaSegmenter.SegMode.valueOf(mode); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/MMSeg4JAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/MMSeg4JAnalyzer.java index 62264014b7..26e937a538 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/MMSeg4JAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/MMSeg4JAnalyzer.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.HugeException; import org.apache.hugegraph.config.ConfigException; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.chenlb.mmseg4j.ComplexSeg; import com.chenlb.mmseg4j.Dictionary; import com.chenlb.mmseg4j.MMSeg; @@ -51,8 +52,8 @@ public class MMSeg4JAnalyzer implements Analyzer { public MMSeg4JAnalyzer(String mode) { if (!SUPPORT_MODES.contains(mode)) { throw new ConfigException( - "Unsupported segment mode '%s' 
for mmseg4j analyzer, " + - "the available values are %s", mode, SUPPORT_MODES); + "Unsupported segment mode '%s' for mmseg4j analyzer, " + + "the available values are %s", mode, SUPPORT_MODES); } int index = SUPPORT_MODES.indexOf(mode); switch (index) { @@ -67,7 +68,7 @@ public MMSeg4JAnalyzer(String mode) { break; default: throw new AssertionError(String.format( - "Unsupported segment mode '%s'", mode)); + "Unsupported segment mode '%s'", mode)); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/SmartCNAnalyzer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/SmartCNAnalyzer.java index 0776c8a794..f265ade214 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/SmartCNAnalyzer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/SmartCNAnalyzer.java @@ -22,19 +22,18 @@ import java.util.Set; import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.hugegraph.util.InsertionOrderUtil; - /** * Reference from https://lucene.apache.org/core/8_11_2/analyzers-smartcn/index.html */ public class SmartCNAnalyzer implements Analyzer { private static final SmartChineseAnalyzer ANALYZER = - new SmartChineseAnalyzer(); + new SmartChineseAnalyzer(); public SmartCNAnalyzer(String mode) { // pass diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java index 3236c2fe73..51c72fba47 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java @@ -22,8 +22,8 @@ import javax.security.sasl.AuthenticationException; -import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.auth.SchemaDefine.AuthElement; +import org.apache.hugegraph.backend.id.Id; public interface AuthManager { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/EntityManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/EntityManager.java index e6beffda4e..4633625729 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/EntityManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/EntityManager.java @@ -22,26 +22,26 @@ import java.util.Map; import java.util.function.Function; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.exception.NotFoundException; +import org.apache.hugegraph.iterator.MapperIterator; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.HugeType; import 
org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraph; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Entity; -import org.apache.hugegraph.exception.NotFoundException; -import org.apache.hugegraph.iterator.MapperIterator; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableMap; public class EntityManager { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java index 7ebc8df017..8bec94341b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java @@ -22,17 +22,16 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Relationship; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.type.define.DataType; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.T; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Relationship; -import org.apache.hugegraph.util.E; - public class HugeAccess extends Relationship { private static final long serialVersionUID = -7644007602408729385L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java index 8df3f2e115..f39a7b6ea2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java @@ -22,15 +22,14 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Relationship; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.T; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Relationship; - public class HugeBelong extends Relationship { private static final long serialVersionUID = -7242751631755533423L; @@ -94,12 +93,10 @@ protected boolean property(String key, Object value) { if (super.property(key, value)) { return true; } - switch (key) { - case P.DESCRIPTION: - this.description = (String) value; - break; - default: - throw new AssertionError("Unsupported key: " + key); + if (key.equals(P.DESCRIPTION)) { + this.description = (String) value; + } else { + throw new AssertionError("Unsupported key: " + key); } return true; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java index e25c0825c2..dd6a3d286e 
100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java @@ -22,16 +22,15 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Entity; -import org.apache.hugegraph.util.E; - public class HugeGroup extends Entity { private static final long serialVersionUID = 2330399818352242686L; @@ -186,7 +185,7 @@ public void initSchemaIfNeeded() { this.graph.schemaTransaction().addVertexLabel(label); } - protected String[] initProperties() { + private String[] initProperties() { List props = new ArrayList<>(); props.add(createPropertyKey(P.NAME)); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeProject.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeProject.java index 7283a31332..f630ba6bc0 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeProject.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeProject.java @@ -25,19 +25,18 @@ import java.util.Set; import org.apache.commons.lang.StringUtils; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Entity; -import org.apache.hugegraph.util.E; - public class HugeProject extends Entity { private static final long serialVersionUID = 8681323499069874520L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeResource.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeResource.java index 3911a58455..9edaac5c65 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeResource.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeResource.java @@ -23,6 +23,10 @@ import java.util.Objects; import java.util.Set; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.auth.SchemaDefine.AuthElement; +import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.traversal.optimize.TraversalUtil; import org.apache.hugegraph.type.Nameable; import org.apache.hugegraph.type.Typeable; import org.apache.hugegraph.util.JsonUtil; @@ -39,10 +43,6 @@ import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; -import org.apache.hugegraph.HugeException; -import 
org.apache.hugegraph.auth.SchemaDefine.AuthElement; -import org.apache.hugegraph.structure.HugeElement; -import org.apache.hugegraph.traversal.optimize.TraversalUtil; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -55,7 +55,7 @@ public class HugeResource { public static final List ALL_RES = ImmutableList.of(ALL); private static final Set CHECK_NAME_RESS = ImmutableSet.of( - ResourceType.META); + ResourceType.META); static { SimpleModule module = new SimpleModule(); @@ -143,7 +143,7 @@ private boolean filter(AuthElement element) { private boolean filter(Nameable element) { assert !(element instanceof Typeable) || this.type.match( - ResourceType.from(((Typeable) element).type())); + ResourceType.from(((Typeable) element).type())); return this.matchLabel(element.name()); } @@ -258,7 +258,8 @@ public static HugeResource parseResource(String resource) { } public static List parseResources(String resources) { - TypeReference type = new TypeReference>() {}; + TypeReference type = new TypeReference>() { + }; return JsonUtil.fromJson(resources, type); } @@ -298,7 +299,7 @@ public HugeResourceSer() { @Override public void serialize(HugeResource res, JsonGenerator generator, SerializerProvider provider) - throws IOException { + throws IOException { generator.writeStartObject(); generator.writeObjectField("type", res.type); @@ -320,7 +321,7 @@ public HugeResourceDeser() { @Override public HugeResource deserialize(JsonParser parser, DeserializationContext ctxt) - throws IOException { + throws IOException { HugeResource res = new HugeResource(); while (parser.nextToken() != JsonToken.END_OBJECT) { String key = parser.getCurrentName(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java index 8b0aec3580..9f19f85f43 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java @@ -22,18 +22,17 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.JsonUtil; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Entity; -import org.apache.hugegraph.util.E; - import com.google.common.collect.ImmutableList; public class HugeTarget extends Entity { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java index 1145662fb2..465d417675 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java @@ -22,16 +22,15 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; import org.apache.hugegraph.backend.id.Id; import 
org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Entity; -import org.apache.hugegraph.util.E; - public class HugeUser extends Entity { private static final long serialVersionUID = -8951193710873772717L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RelationshipManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RelationshipManager.java index 2fe2260279..a1c440611e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RelationshipManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RelationshipManager.java @@ -22,29 +22,29 @@ import java.util.Map; import java.util.function.Function; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Relationship; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.exception.NotFoundException; +import org.apache.hugegraph.iterator.MapperIterator; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraph; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.SchemaDefine.Relationship; -import org.apache.hugegraph.exception.NotFoundException; -import org.apache.hugegraph.iterator.MapperIterator; -import org.apache.hugegraph.structure.HugeEdge; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableMap; public class RelationshipManager { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceObject.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceObject.java index 40e5c0ec9b..03234f5972 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceObject.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceObject.java @@ -19,8 +19,8 @@ import org.apache.hugegraph.auth.SchemaDefine.AuthElement; import org.apache.hugegraph.schema.SchemaElement; -import org.apache.hugegraph.type.Nameable; import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.type.Nameable; import org.apache.hugegraph.util.E; public class ResourceObject { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceType.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceType.java index c5ab9b96dd..caeafc8d04 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceType.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/ResourceType.java @@ -17,6 +17,8 @@ package org.apache.hugegraph.auth; +import java.util.Objects; + import org.apache.hugegraph.type.HugeType; public enum ResourceType { @@ -70,11 +72,8 @@ public boolean match(ResourceType required) { return true; } - switch (required) { - case NONE: - return this != NONE; - default: - break; + if (Objects.requireNonNull(required) == ResourceType.NONE) { + return this != NONE; } switch (this) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RolePermission.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RolePermission.java index 033ffa22b5..b7d776d6ff 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RolePermission.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/RolePermission.java @@ -25,6 +25,7 @@ import java.util.Objects; import java.util.TreeMap; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.JsonUtil; import org.apache.tinkerpop.shaded.jackson.annotation.JsonProperty; import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator; @@ -37,14 +38,12 @@ import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; -import org.apache.hugegraph.util.E; - public class RolePermission { public static final RolePermission NONE = RolePermission.role( - "none", HugePermission.NONE); + "none", HugePermission.NONE); public static final RolePermission ADMIN = RolePermission.role( - "admin", HugePermission.ANY); + "admin", HugePermission.ANY); static { SimpleModule module = new SimpleModule(); @@ -64,7 +63,7 @@ public RolePermission() { } private RolePermission(Map>> roles) { + List>> roles) { this.roles = roles; } @@ -76,7 +75,7 @@ protected void add(String graph, String action, protected void add(String graph, HugePermission action, List resources) { Map> permissions = - this.roles.get(graph); + this.roles.get(graph); if (permissions == null) { permissions = new TreeMap<>(); this.roles.put(graph, permissions); @@ -95,14 +94,14 @@ public Map>> map() { public boolean contains(RolePermission other) { for (Map.Entry>> e1 : - other.roles.entrySet()) { + other.roles.entrySet()) { String g = e1.getKey(); Map> perms = this.roles.get(g); if (perms == null) { return false; } for (Map.Entry> e2 : - e1.getValue().entrySet()) { + e1.getValue().entrySet()) { List ress = perms.get(e2.getKey()); if (ress == null) { return false; @@ -189,7 +188,7 @@ public static RolePermission builtin(RolePermission role) { } private static class RolePermissionSer - extends StdSerializer { + extends StdSerializer { private static final long serialVersionUID = -2533310506459479383L; @@ -200,7 +199,7 @@ public RolePermissionSer() { @Override public void serialize(RolePermission role, JsonGenerator generator, SerializerProvider provider) - throws IOException { + throws IOException { generator.writeStartObject(); generator.writeObjectField("roles", role.roles); generator.writeEndObject(); @@ -208,7 +207,7 @@ public void serialize(RolePermission role, JsonGenerator generator, } private static class RolePermissionDeser - extends StdDeserializer { + extends 
StdDeserializer { private static final long serialVersionUID = -2038234657843260957L; @@ -219,9 +218,10 @@ public RolePermissionDeser() { @Override public RolePermission deserialize(JsonParser parser, DeserializationContext ctxt) - throws IOException { + throws IOException { TypeReference type = new TypeReference>>>() {}; + TreeMap>>>() { + }; if ("roles".equals(parser.nextFieldName())) { parser.nextValue(); return new RolePermission(parser.readValueAs(type)); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java index 7a30fe9c49..98072d231b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java @@ -23,6 +23,7 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.auth.HugeTarget.P; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.IndexLabel; @@ -30,18 +31,15 @@ import org.apache.hugegraph.schema.SchemaManager; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.Nameable; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.util.E; - public abstract class SchemaDefine { protected final HugeGraphParams graph; @@ -241,7 +239,7 @@ protected Object[] asArray(List list) { // NOTE: travis-ci fails if class Entity implements Nameable public abstract static class Entity extends AuthElement - implements org.apache.hugegraph.type.Nameable { + implements org.apache.hugegraph.type.Nameable { private static final long serialVersionUID = 4113319546914811762L; @@ -251,7 +249,7 @@ public static T fromVertex(Vertex vertex, T entity) { vertex.label(), entity.label()); entity.id((Id) vertex.id()); for (Iterator> iter = vertex.properties(); - iter.hasNext();) { + iter.hasNext(); ) { VertexProperty prop = iter.next(); entity.property(prop.key(), prop.value()); } @@ -288,7 +286,7 @@ public static T fromEdge(Edge edge, edge.label(), relationship.label()); relationship.id((Id) edge.id()); for (Iterator> iter = edge.properties(); - iter.hasNext();) { + iter.hasNext(); ) { Property prop = iter.next(); relationship.property(prop.key(), prop.value()); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java index d79840380e..60aaf9aab2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java @@ -411,13 +411,13 @@ public Id createProject(HugeProject project) { Id adminGroupId = project.adminGroupId(); Id opGroupId = project.opGroupId(); HugeAccess adminGroupWriteAccess = new HugeAccess( - adminGroupId, targetId, - 
HugePermission.WRITE); + adminGroupId, targetId, + HugePermission.WRITE); // Ditto adminGroupWriteAccess.creator(project.creator()); HugeAccess adminGroupReadAccess = new HugeAccess( - adminGroupId, targetId, - HugePermission.READ); + adminGroupId, targetId, + HugePermission.READ); // Ditto adminGroupReadAccess.creator(project.creator()); HugeAccess opGroupReadAccess = new HugeAccess(opGroupId, targetId, @@ -634,7 +634,7 @@ private RolePermission rolePermission(HugeTarget target) { @Override public String loginUser(String username, String password) - throws AuthenticationException { + throws AuthenticationException { HugeUser user = this.matchUser(username, password); if (user == null) { String msg = "Incorrect username or password"; @@ -672,10 +672,10 @@ public UserWithRole validateUser(String token) { Claims payload = null; boolean needBuildCache = false; if (username == null) { - try{ + try { payload = this.tokenGenerator.verify(token); - }catch (Throwable t){ - LOG.error(String.format("Failed to verify token:[ %s ], cause:",token),t); + } catch (Throwable t) { + LOG.error(String.format("Failed to verify token:[ %s ], cause:", token), t); return new UserWithRole(""); } username = (String) payload.get(AuthConstant.TOKEN_USER_NAME); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/TokenGenerator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/TokenGenerator.java index e8ee77e8f1..0814e84b1a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/TokenGenerator.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/TokenGenerator.java @@ -22,7 +22,6 @@ import java.util.Map; import javax.crypto.SecretKey; -import jakarta.ws.rs.NotAuthorizedException; import org.apache.hugegraph.config.AuthOptions; import org.apache.hugegraph.config.HugeConfig; @@ -34,6 +33,7 @@ import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureAlgorithm; import io.jsonwebtoken.security.Keys; +import jakarta.ws.rs.NotAuthorizedException; public class TokenGenerator { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/BackendException.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/BackendException.java index 95f271b798..a1f587866e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/BackendException.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/BackendException.java @@ -44,7 +44,7 @@ public BackendException(Throwable cause) { } public static final void check(boolean expression, String message, Object... 
args) - throws BackendException { + throws BackendException { if (!expression) { throw new BackendException(message, args); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/AbstractCache.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/AbstractCache.java index dff48f509c..e26a779f4c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/AbstractCache.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/AbstractCache.java @@ -22,10 +22,9 @@ import java.util.concurrent.atomic.LongAdder; import java.util.function.Function; -import org.slf4j.Logger; - import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public abstract class AbstractCache implements Cache { @@ -183,7 +182,7 @@ public long tick() { int expireItems = 0; long current = now(); - for (Iterator> it = this.nodes(); it.hasNext();) { + for (Iterator> it = this.nodes(); it.hasNext(); ) { CacheNode node = it.next(); if (current - node.time() >= expireTime) { // Remove item while iterating map (it must be ConcurrentMap) diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java index 8e7742ef87..c7a43e228e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java @@ -24,12 +24,11 @@ import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public class CacheManager { @@ -69,7 +68,7 @@ private TimerTask scheduleTimer(float period) { public void run() { try { for (Entry> entry : - caches().entrySet()) { + caches().entrySet()) { this.tick(entry.getKey(), entry.getValue()); } } catch (Throwable e) { @@ -97,7 +96,7 @@ private void tick(String name, Cache cache) { } public Map> caches() { - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) Map> caches = (Map) this.caches; return Collections.unmodifiableMap(caches); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheNotifier.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheNotifier.java index a6ecebb118..e470f1067a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheNotifier.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheNotifier.java @@ -30,7 +30,11 @@ public interface CacheNotifier extends AutoCloseable { void reload(); - interface GraphCacheNotifier extends CacheNotifier {} + interface GraphCacheNotifier extends CacheNotifier { - interface SchemaCacheNotifier extends CacheNotifier {} + } + + interface SchemaCacheNotifier extends CacheNotifier { + + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedBackendStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedBackendStore.java index a6a2bdc71d..a1c632d9f5 100644 
--- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedBackendStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedBackendStore.java @@ -19,8 +19,8 @@ import java.util.Iterator; -import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; import org.apache.hugegraph.backend.store.BackendFeatures; import org.apache.hugegraph.backend.store.BackendMutation; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedGraphTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedGraphTransaction.java index bb14444d95..89f047baed 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedGraphTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedGraphTransaction.java @@ -25,15 +25,15 @@ import java.util.List; import java.util.Set; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.cache.CachedBackendStore.QueryId; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.store.BackendMutation; import org.apache.hugegraph.backend.store.BackendStore; import org.apache.hugegraph.backend.store.ram.RamTable; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.IdQuery; -import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; @@ -49,6 +49,7 @@ import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Events; + import com.google.common.collect.ImmutableSet; public final class CachedGraphTransaction extends GraphTransaction { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java index 9de996e188..ba5c5821b9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java @@ -23,12 +23,12 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Consumer; -import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.hugegraph.backend.store.ram.IntObjectMap; -import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.store.BackendStore; +import org.apache.hugegraph.backend.store.ram.IntObjectMap; +import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.event.EventHub; import org.apache.hugegraph.event.EventListener; @@ -37,6 +37,7 @@ import org.apache.hugegraph.type.HugeType; import 
org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Events; + import com.google.common.collect.ImmutableSet; public final class CachedSchemaTransaction extends SchemaTransaction { @@ -437,7 +438,7 @@ private void setValue(HugeType type, int key, V value) { } private static class CachedTypes - extends ConcurrentHashMap { + extends ConcurrentHashMap { private static final long serialVersionUID = -2215549791679355996L; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/OffheapCache.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/OffheapCache.java index a97898f2ba..d641c8276f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/OffheapCache.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/OffheapCache.java @@ -24,14 +24,6 @@ import java.util.List; import java.util.function.Consumer; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; -import org.caffinitas.ohc.CacheSerializer; -import org.caffinitas.ohc.CloseableIterator; -import org.caffinitas.ohc.Eviction; -import org.caffinitas.ohc.OHCache; -import org.caffinitas.ohc.OHCacheBuilder; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; @@ -39,6 +31,8 @@ import org.apache.hugegraph.backend.serializer.BinaryBackendEntry; import org.apache.hugegraph.backend.serializer.BinarySerializer; import org.apache.hugegraph.backend.serializer.BytesBuffer; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.HugeType; @@ -46,6 +40,11 @@ import org.apache.hugegraph.util.Bytes; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; +import org.caffinitas.ohc.CacheSerializer; +import org.caffinitas.ohc.CloseableIterator; +import org.caffinitas.ohc.Eviction; +import org.caffinitas.ohc.OHCache; +import org.caffinitas.ohc.OHCacheBuilder; public class OffheapCache extends AbstractCache { @@ -119,7 +118,7 @@ protected boolean write(Id id, Object value, long timeOffset) { } if (serializedSize > VALUE_SIZE_TO_SKIP) { LOG.info("Skip to cache '{}' due to value size {} > limit {}", - id, serializedSize, VALUE_SIZE_TO_SKIP); + id, serializedSize, VALUE_SIZE_TO_SKIP); return false; } @@ -334,13 +333,13 @@ private Object deserializeElement(ValueType type, BytesBuffer buffer) { private HugeException unsupported(ValueType type) { throw new HugeException( - "Unsupported deserialize type: %s", type); + "Unsupported deserialize type: %s", type); } private HugeException unsupported(Object value) { throw new HugeException( - "Unsupported type of serialize value: '%s'(%s)", - value, value.getClass()); + "Unsupported type of serialize value: '%s'(%s)", + value, value.getClass()); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/RamCache.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/RamCache.java index 05366f9af0..a5e8d162a8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/RamCache.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/RamCache.java @@ 
-156,7 +156,7 @@ protected final void remove(Id id) { @Override protected Iterator> nodes() { Iterator> iter = this.map.values().iterator(); - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) Iterator> iterSuper = (Iterator) iter; return iterSuper; } @@ -280,8 +280,8 @@ public LinkedQueueNonBigLock() { /** * Reset the head node and rear node * NOTE: - * only called by LinkedQueueNonBigLock() without lock - * or called by clear() with lock(head, rear) + * only called by LinkedQueueNonBigLock() without lock + * or called by clear() with lock(head, rear) */ private void reset() { this.head.prev = this.empty; @@ -316,7 +316,7 @@ private boolean checkNotInQueue(K key) { List keys = this.dumpKeys(); if (keys.contains(key)) { throw new RuntimeException(String.format( - "Expect %s should be not in %s", key, keys)); + "Expect %s should be not in %s", key, keys)); } return true; } @@ -338,8 +338,8 @@ private boolean checkPrevNotInNext(LinkNode self) { int selfPos = keys.indexOf(self.key()); if (prevPos > selfPos && selfPos != -1) { throw new RuntimeException(String.format( - "Expect %s should be before %s, actual %s", - prev.key(), self.key(), keys)); + "Expect %s should be before %s, actual %s", + prev.key(), self.key(), keys)); } return true; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/EdgeId.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/EdgeId.java index 711ee0d87e..a9053e5135 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/EdgeId.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/EdgeId.java @@ -35,12 +35,12 @@ */ public class EdgeId implements Id { - public static final HugeKeys[] KEYS = new HugeKeys[] { - HugeKeys.OWNER_VERTEX, - HugeKeys.DIRECTION, - HugeKeys.LABEL, - HugeKeys.SORT_VALUES, - HugeKeys.OTHER_VERTEX + public static final HugeKeys[] KEYS = new HugeKeys[]{ + HugeKeys.OWNER_VERTEX, + HugeKeys.DIRECTION, + HugeKeys.LABEL, + HugeKeys.SORT_VALUES, + HugeKeys.OTHER_VERTEX }; private final Id ownerVertexId; @@ -135,17 +135,17 @@ public String asString() { } if (this.directed) { this.cache = SplicingIdGenerator.concat( - IdUtil.writeString(this.ownerVertexId), - this.direction.type().string(), - IdUtil.writeLong(this.edgeLabelId), - this.sortValues, - IdUtil.writeString(this.otherVertexId)); + IdUtil.writeString(this.ownerVertexId), + this.direction.type().string(), + IdUtil.writeLong(this.edgeLabelId), + this.sortValues, + IdUtil.writeString(this.otherVertexId)); } else { this.cache = SplicingIdGenerator.concat( - IdUtil.writeString(this.sourceVertexId()), - IdUtil.writeLong(this.edgeLabelId), - this.sortValues, - IdUtil.writeString(this.targetVertexId())); + IdUtil.writeString(this.sourceVertexId()), + IdUtil.writeLong(this.edgeLabelId), + this.sortValues, + IdUtil.writeString(this.targetVertexId())); } return this.cache; } @@ -233,7 +233,7 @@ public static EdgeId parse(String id) throws NotFoundException { } public static EdgeId parse(String id, boolean returnNullIfError) - throws NotFoundException { + throws NotFoundException { String[] idParts = SplicingIdGenerator.split(id); if (!(idParts.length == 4 || idParts.length == 5)) { if (returnNullIfError) { @@ -284,10 +284,10 @@ public static Id parseStoredString(String id) { public static String asStoredString(Id id) { EdgeId eid = (EdgeId) id; return SplicingIdGenerator.concat( - 
IdUtil.writeStoredString(eid.sourceVertexId()), - IdGenerator.asStoredString(eid.edgeLabelId()), - eid.sortValues(), - IdUtil.writeStoredString(eid.targetVertexId())); + IdUtil.writeStoredString(eid.sourceVertexId()), + IdGenerator.asStoredString(eid.edgeLabelId()), + eid.sortValues(), + IdUtil.writeStoredString(eid.targetVertexId())); } public static String concat(String... ids) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdUtil.java index 11d9378f01..90cd98d4b7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdUtil.java @@ -20,7 +20,6 @@ import java.nio.ByteBuffer; import org.apache.commons.lang3.StringUtils; - import org.apache.hugegraph.backend.id.Id.IdType; import org.apache.hugegraph.backend.serializer.BytesBuffer; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SnowflakeIdGenerator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SnowflakeIdGenerator.java index 0ac86353fc..21059e5290 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SnowflakeIdGenerator.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SnowflakeIdGenerator.java @@ -16,8 +16,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.HugeGraphParams; @@ -27,13 +25,14 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; import org.apache.hugegraph.util.TimeUtil; +import org.slf4j.Logger; public class SnowflakeIdGenerator extends IdGenerator { private static final Logger LOG = Log.logger(SnowflakeIdGenerator.class); private static final Map INSTANCES = - new ConcurrentHashMap<>(); + new ConcurrentHashMap<>(); private final boolean forceString; private final IdWorker idWorker; @@ -114,13 +113,13 @@ public IdWorker(long workerId, long datacenterId) { // Sanity check for workerId if (workerId > MAX_WORKER_ID || workerId < 0) { throw new IllegalArgumentException(String.format( - "Worker id can't > %d or < 0", - MAX_WORKER_ID)); + "Worker id can't > %d or < 0", + MAX_WORKER_ID)); } if (datacenterId > MAX_DC_ID || datacenterId < 0) { throw new IllegalArgumentException(String.format( - "Datacenter id can't > %d or < 0", - MAX_DC_ID)); + "Datacenter id can't > %d or < 0", + MAX_DC_ID)); } this.workerId = workerId; this.datacenterId = datacenterId; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SplicingIdGenerator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SplicingIdGenerator.java index 7e6e29262d..47f098fa73 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SplicingIdGenerator.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SplicingIdGenerator.java @@ -70,8 +70,9 @@ public Id generate(HugeVertex vertex) { /** * Concat multiple ids into one composite id with IDS_SPLITOR + * * @param ids the string id values to be concatted - * @return concatted string value + * @return concatted string value */ public static String concat(String... 
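(Editor's aside on the IdWorker hunk above: the sanity checks bound workerId and datacenterId because both are packed into a fixed number of bits of the generated id. Below is a minimal Snowflake-style sketch assuming the common 41/5/5/12 bit layout; the widths and names are illustrative only, not necessarily what SnowflakeIdGenerator.IdWorker actually uses.)

```java
// Illustrative Snowflake-style id generator; the 41/5/5/12 bit layout is an
// assumption for this sketch, not necessarily IdWorker's real layout.
public class SnowflakeSketch {

    private static final long MAX_WORKER_ID = (1L << 5) - 1;   // 31
    private static final long MAX_DC_ID = (1L << 5) - 1;       // 31
    private static final long MAX_SEQUENCE = (1L << 12) - 1;   // 4095

    private final long workerId;
    private final long datacenterId;
    private long sequence = 0L;
    private long lastTimestamp = -1L;

    public SnowflakeSketch(long workerId, long datacenterId) {
        // Same kind of sanity checks as IdWorker: both ids must fit their bits
        if (workerId > MAX_WORKER_ID || workerId < 0) {
            throw new IllegalArgumentException("Worker id can't > " +
                                               MAX_WORKER_ID + " or < 0");
        }
        if (datacenterId > MAX_DC_ID || datacenterId < 0) {
            throw new IllegalArgumentException("Datacenter id can't > " +
                                               MAX_DC_ID + " or < 0");
        }
        this.workerId = workerId;
        this.datacenterId = datacenterId;
    }

    public synchronized long nextId() {
        long timestamp = System.currentTimeMillis();
        if (timestamp == this.lastTimestamp) {
            // NOTE: a real implementation waits for the next millisecond
            // when the sequence wraps within the same millisecond
            this.sequence = (this.sequence + 1) & MAX_SEQUENCE;
        } else {
            this.sequence = 0L;
        }
        this.lastTimestamp = timestamp;
        // timestamp | datacenter id | worker id | per-millisecond sequence
        return (timestamp << 22) | (this.datacenterId << 17) |
               (this.workerId << 12) | this.sequence;
    }

    public static void main(String[] args) {
        SnowflakeSketch worker = new SnowflakeSketch(1, 1);
        System.out.println(worker.nextId());
    }
}
```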
ids) { // NOTE: must support string id when using this method @@ -80,8 +81,9 @@ public static String concat(String... ids) { /** * Split a composite id into multiple ids with IDS_SPLITOR + * * @param ids the string id value to be splitted - * @return splitted string values + * @return splitted string values */ public static String[] split(String ids) { return IdUtil.unescape(ids, IDS_SPLITOR_STR, ESCAPE_STR); @@ -89,8 +91,9 @@ public static String[] split(String ids) { /** * Concat property values with NAME_SPLITOR + * * @param values the property values to be concatted - * @return concatted string value + * @return concatted string value */ public static String concatValues(List values) { // Convert the object list to string array @@ -104,8 +107,9 @@ public static String concatValues(List values) { /** * Concat property values with NAME_SPLITOR + * * @param values the property values to be concatted - * @return concatted string value + * @return concatted string value */ public static String concatValues(Object... values) { return concatValues(Arrays.asList(values)); @@ -113,8 +117,9 @@ public static String concatValues(Object... values) { /** * Concat multiple parts into a single id with ID_SPLITOR + * * @param parts the string id values to be spliced - * @return spliced id object + * @return spliced id object */ public static Id splicing(String... parts) { String escaped = IdUtil.escape(ID_SPLITOR, ESCAPE, parts); @@ -123,8 +128,9 @@ public static Id splicing(String... parts) { /** * Parse a single id into multiple parts with ID_SPLITOR + * * @param id the id object to be parsed - * @return parsed string id parts + * @return parsed string id parts */ public static String[] parse(Id id) { return IdUtil.unescape(id.asString(), ID_SPLITOR_STR, ESCAPE_STR); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/IdHolder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/IdHolder.java index 37fe463508..b420648767 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/IdHolder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/IdHolder.java @@ -23,15 +23,14 @@ import java.util.function.Function; import org.apache.commons.lang.NotImplementedException; +import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - -import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.iterator.CIter; import org.apache.hugegraph.iterator.Metadatable; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; public abstract class IdHolder { @@ -133,7 +132,7 @@ public Set all() { } public static class BatchIdHolder extends IdHolder - implements CIter { + implements CIter { private final Iterator entries; private final Function> fetcher; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageEntryIterator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageEntryIterator.java index 5d8370824b..bbc93c79b6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageEntryIterator.java +++ 
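(Editor's aside on the SplicingIdGenerator Javadoc above: the concat/split/splicing/parse methods join id parts around a splitter character and escape occurrences of that character inside a part. The sketch below shows the same idea with made-up splitter and escape characters rather than HugeGraph's actual ID_SPLITOR/IDS_SPLITOR/ESCAPE values.)

```java
// Illustrative sketch only (not HugeGraph's actual implementation): joining id
// parts with a splitter char and an escape char, then splitting them back.
import java.util.ArrayList;
import java.util.List;

public class SplicingSketch {

    private static final char SPLITOR = '>';  // made-up splitter for the demo
    private static final char ESCAPE = '`';   // made-up escape char

    // Join parts, escaping any splitter/escape char that occurs inside a part
    static String concat(String... parts) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < parts.length; i++) {
            if (i > 0) {
                sb.append(SPLITOR);
            }
            for (char ch : parts[i].toCharArray()) {
                if (ch == SPLITOR || ch == ESCAPE) {
                    sb.append(ESCAPE);
                }
                sb.append(ch);
            }
        }
        return sb.toString();
    }

    // Split a composite id back into its parts, honouring the escape char
    static List<String> split(String id) {
        List<String> parts = new ArrayList<>();
        StringBuilder current = new StringBuilder();
        for (int i = 0; i < id.length(); i++) {
            char ch = id.charAt(i);
            if (ch == ESCAPE && i + 1 < id.length()) {
                current.append(id.charAt(++i));
            } else if (ch == SPLITOR) {
                parts.add(current.toString());
                current.setLength(0);
            } else {
                current.append(ch);
            }
        }
        parts.add(current.toString());
        return parts;
    }

    public static void main(String[] args) {
        String id = concat("person", "1:marko", "knows>likes");
        System.out.println(id);         // person>1:marko>knows`>likes
        System.out.println(split(id));  // [person, 1:marko, knows>likes]
    }
}
```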
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageEntryIterator.java @@ -21,11 +21,10 @@ import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - import org.apache.hugegraph.exception.NotSupportException; import org.apache.hugegraph.iterator.CIter; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; public class PageEntryIterator implements CIter { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageIds.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageIds.java index 96b9d25cce..879821f3db 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageIds.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/PageIds.java @@ -20,6 +20,7 @@ import java.util.Set; import org.apache.hugegraph.backend.id.Id; + import com.google.common.collect.ImmutableSet; public final class PageIds { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/QueryList.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/QueryList.java index 7eb10e0165..d1e11e9220 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/QueryList.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/QueryList.java @@ -22,14 +22,14 @@ import java.util.List; import java.util.Set; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.page.IdHolder.BatchIdHolder; +import org.apache.hugegraph.backend.page.IdHolder.FixedIdHolder; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.ConditionQuery.OptimizedType; import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; -import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.page.IdHolder.BatchIdHolder; -import org.apache.hugegraph.backend.page.IdHolder.FixedIdHolder; import org.apache.hugegraph.util.Bytes; import org.apache.hugegraph.util.E; @@ -46,7 +46,7 @@ public QueryList(Query parent, QueryResults.Fetcher fetcher) { this.queries = new ArrayList<>(); } - protected Query parent() { + Query parent() { return this.parent; } @@ -98,7 +98,7 @@ public QueryResults fetch(int pageSize) { return QueryResults.flatMap(this.queries.iterator(), FlattenQuery::iterator); } - protected PageResults fetchNext(PageInfo pageInfo, long pageSize) { + PageResults fetchNext(PageInfo pageInfo, long pageSize) { FlattenQuery query = null; int offset = pageInfo.offset(); int visited = 0; @@ -126,16 +126,18 @@ private interface FlattenQuery { /** * For non-paging situation - * @return BackendEntry iterator + * + * @return BackendEntry iterator */ QueryResults iterator(); /** * For paging situation - * @param index position IdHolder(Query) - * @param page set query page - * @param pageSize set query page size - * @return BackendEntry iterator with page + * + * @param index position IdHolder(Query) + * @param page set query page + * @param pageSize set query page size + * @return BackendEntry iterator with page */ PageResults iterator(int index, String page, long pageSize); @@ -323,8 +325,8 @@ private QueryResults 
queryByIndexIds(Set ids, boolean inOrder) { public static class PageResults { public static final PageResults EMPTY = new PageResults<>( - QueryResults.empty(), - PageState.EMPTY); + QueryResults.empty(), + PageState.EMPTY); private final QueryResults results; private final PageState pageState; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/SortByCountIdHolderList.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/SortByCountIdHolderList.java index f2daad0d42..eeac978672 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/SortByCountIdHolderList.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/page/SortByCountIdHolderList.java @@ -22,11 +22,12 @@ import java.util.Map; import java.util.Set; -import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.page.IdHolder.FixedIdHolder; +import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.google.common.collect.ImmutableSet; public class SortByCountIdHolderList extends IdHolderList { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/BatchConditionQuery.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/BatchConditionQuery.java index 99da61efe1..181ee48648 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/BatchConditionQuery.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/BatchConditionQuery.java @@ -43,7 +43,7 @@ public void mergeToIN(ConditionQuery query, HugeKeys key) { if (this.in == null) { assert !this.containsRelation(RelationType.IN); this.resetConditions(InsertionOrderUtil.newList( - (List) query.conditions())); + (List) query.conditions())); this.unsetCondition(key); List list = new ArrayList<>(this.batchSize); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java index 6d3863d856..579b349c85 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java @@ -28,9 +28,8 @@ import java.util.function.BiPredicate; import org.apache.commons.lang.ArrayUtils; - -import org.apache.hugegraph.backend.store.Shard; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.store.Shard; import org.apache.hugegraph.structure.HugeElement; import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.type.define.HugeKeys; @@ -38,6 +37,7 @@ import org.apache.hugegraph.util.DateUtil; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.NumericUtil; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -47,7 +47,7 @@ public enum ConditionType { NONE, RELATION, AND, - OR; + OR } public enum RelationType implements BiPredicate { @@ -144,12 +144,12 @@ public enum RelationType implements BiPredicate { private final Class v2Class; RelationType(String op, - BiFunction tester) { + BiFunction tester) { this(op, null, null, tester); } RelationType(String op, Class v1Class, 
Class v2Class, - BiFunction tester) { + BiFunction tester) { this.operator = op; this.tester = tester; this.v1Class = v1Class; @@ -162,7 +162,8 @@ public String string() { /** * Determine two values of any type equal - * @param first is actual value + * + * @param first is actual value * @param second is value in query condition * @return true if equal, otherwise false */ @@ -186,13 +187,14 @@ private static boolean equals(final Object first, /** * Determine two numbers equal - * @param first is actual value, might be Number/Date or String, It is - * probably that the `first` is serialized to String. + * + * @param first is actual value, might be Number/Date or String, It is + * probably that the `first` is serialized to String. * @param second is value in query condition, must be Number/Date * @return the value 0 if first is numerically equal to second; - * a value less than 0 if first is numerically less than - * second; and a value greater than 0 if first is - * numerically greater than second. + * a value less than 0 if first is numerically less than + * second; and a value greater than 0 if first is + * numerically greater than second. */ private static int compare(final Object first, final Object second) { assert second != null; @@ -204,9 +206,9 @@ private static int compare(final Object first, final Object second) { } throw new IllegalArgumentException(String.format( - "Can't compare between %s(%s) and %s(%s)", first, - first == null ? null : first.getClass().getSimpleName(), - second, second.getClass().getSimpleName())); + "Can't compare between %s(%s) and %s(%s)", first, + first == null ? null : first.getClass().getSimpleName(), + second, second.getClass().getSimpleName())); } private static int compareDate(Object first, Date second) { @@ -218,9 +220,9 @@ private static int compareDate(Object first, Date second) { } throw new IllegalArgumentException(String.format( - "Can't compare between %s(%s) and %s(%s)", - first, first.getClass().getSimpleName(), - second, second.getClass().getSimpleName())); + "Can't compare between %s(%s) and %s(%s)", + first, first.getClass().getSimpleName(), + second, second.getClass().getSimpleName())); } private void checkBaseType(Object value, Class clazz) { @@ -549,8 +551,8 @@ public abstract static class Relation extends Condition { protected Object serialValue; protected static final Set UNFLATTEN_RELATION_TYPES = - ImmutableSet.of(RelationType.IN, RelationType.NOT_IN, - RelationType.TEXT_CONTAINS_ANY); + ImmutableSet.of(RelationType.IN, RelationType.NOT_IN, + RelationType.TEXT_CONTAINS_ANY); @Override public ConditionType type() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java index 716a47c391..f306d38798 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java @@ -44,6 +44,7 @@ import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.LongEncoding; import org.apache.hugegraph.util.NumericUtil; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; @@ -57,10 +58,11 @@ public class ConditionQuery extends IdQuery { public static final char INDEX_SYM_MAX = '\u0003'; // Note: here we use "new String" to distinguish 
normal string code - public static final String INDEX_VALUE_NULL = new String(""); - public static final String INDEX_VALUE_EMPTY = new String(""); + public static final String INDEX_VALUE_NULL = ""; + public static final String INDEX_VALUE_EMPTY = ""; public static final Set IGNORE_SYM_SET; + static { List list = new ArrayList<>(INDEX_SYM_MAX - INDEX_SYM_MIN); for (char ch = INDEX_SYM_MIN; ch <= INDEX_SYM_MAX; ch++) { @@ -422,6 +424,7 @@ public Set userpropKeys() { /** * This method is only used for secondary index scenario, * its relation must be EQ + * * @param fields the user property fields * @return the corresponding user property serial values of fields */ @@ -443,8 +446,8 @@ public String userpropValuesString(List fields) { } if (!got) { throw new BackendException( - "No such userprop named '%s' in the query '%s'", - field, this); + "No such userprop named '%s' in the query '%s'", + field, this); } } return concatValues(values); @@ -602,7 +605,7 @@ public boolean mayHasDupKeys(Set keys) { public void optimized(OptimizedType optimizedType) { assert this.optimizedType.ordinal() <= optimizedType.ordinal() : - this.optimizedType + " !<= " + optimizedType; + this.optimizedType + " !<= " + optimizedType; this.optimizedType = optimizedType; Query originQuery = this.originQuery(); @@ -672,7 +675,8 @@ public static String concatValues(List values) { public static String concatValues(Object value) { if (value instanceof String) { return escapeSpecialValueIfNeeded((String) value); - } if (value instanceof List) { + } + if (value instanceof List) { return concatValues((List) value); } else if (needConvertNumber(value)) { return LongEncoding.encodeNumber(value); @@ -736,7 +740,7 @@ public void addIndexValue(Id indexField, Id elementId, this.filed2IndexValues.putIfAbsent(indexField, new HashMap<>()); } Map> element2IndexValueMap = - this.filed2IndexValues.get(indexField); + this.filed2IndexValues.get(indexField); if (element2IndexValueMap.containsKey(elementId)) { element2IndexValueMap.get(elementId).add(indexValue); } else { @@ -781,7 +785,7 @@ public boolean checkRangeIndex(HugeElement element, Condition cond) { } Condition.UserpropRelation propRelation = - (Condition.UserpropRelation) cond; + (Condition.UserpropRelation) cond; Id propId = propRelation.key(); Set fieldValues = this.toRemoveIndexValues(propId, element.id()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQueryFlatten.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQueryFlatten.java index fc488cc4b9..83af41b008 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQueryFlatten.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQueryFlatten.java @@ -40,7 +40,7 @@ public final class ConditionQueryFlatten { private static final Set SPECIAL_KEYS = ImmutableSet.of( - HugeKeys.LABEL + HugeKeys.LABEL ); public static List flatten(ConditionQuery query) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/IdQuery.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/IdQuery.java index e5e3c9a3a6..f7a1c732b3 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/IdQuery.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/IdQuery.java @@ -27,6 +27,7 @@ import 
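(Editor's aside on the INDEX_VALUE_NULL/INDEX_VALUE_EMPTY hunk above: the retained comment refers to telling these marker constants apart from ordinary empty strings by reference identity, which is what `new String("")` made possible. A minimal illustration of that identity-vs-equality distinction:)

```java
// new String("") creates an object distinct from the interned literal "",
// so it can be told apart with ==, while equals() still sees the same value.
public class StringIdentitySketch {
    public static void main(String[] args) {
        String marker = new String("");
        String normal = "";
        System.out.println(marker == normal);      // false: different objects
        System.out.println(marker.equals(normal)); // true: same value
    }
}
```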
org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -157,7 +158,7 @@ public int idsSize() { @Override public Set ids() { return this.id == null ? ImmutableSet.of() : - ImmutableSet.of(this.id); + ImmutableSet.of(this.id); } @Override diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java index 36776be592..56352e4c11 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java @@ -402,8 +402,8 @@ public void checkCapacity(long count) throws LimitExceedException { query = query.substring(0, MAX_CHARS) + "..."; } throw new LimitExceedException( - "Too many records(must <= %s) for the query: %s", - this.capacity, query); + "Too many records(must <= %s) for the query: %s", + this.capacity, query); } } @@ -575,8 +575,8 @@ public static long defaultCapacity() { public static void checkForceCapacity(long count) throws LimitExceedException { if (count > Query.DEFAULT_CAPACITY) { throw new LimitExceedException( - "Too many records(must <= %s) for one query", - Query.DEFAULT_CAPACITY); + "Too many records(must <= %s) for one query", + Query.DEFAULT_CAPACITY); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/QueryResults.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/QueryResults.java index 12440dfee0..ca457a63a7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/QueryResults.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/QueryResults.java @@ -26,8 +26,6 @@ import java.util.Set; import java.util.function.Function; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.iterator.CIter; @@ -38,13 +36,14 @@ import org.apache.hugegraph.type.Idfiable; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; public class QueryResults { private static final Iterator EMPTY_ITERATOR = new EmptyIterator<>(); private static final QueryResults EMPTY = new QueryResults<>( - emptyIterator(), Query.NONE); + emptyIterator(), Query.NONE); private final Iterator results; private final List queries; @@ -97,7 +96,7 @@ public List queries() { } public Iterator keepInputOrderIfNeeded( - Iterator origin) { + Iterator origin) { if (!origin.hasNext()) { // None result found return origin; @@ -216,7 +215,7 @@ public static void fillMap(Iterator iterator, } public static QueryResults flatMap( - Iterator iterator, Function> func) { + Iterator iterator, Function> func) { @SuppressWarnings("unchecked") QueryResults[] qr = new QueryResults[1]; qr[0] = new QueryResults<>(new FlatMapperIterator<>(iterator, i -> { @@ -266,7 +265,9 @@ public static Iterator emptyIterator() { return (Iterator) EMPTY_ITERATOR; } - public interface Fetcher extends Function> {} + public interface Fetcher extends Function> { + + } private static class EmptyIterator implements CIter { diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java index 72a97a63e4..5c1b36d1cf 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java @@ -23,11 +23,11 @@ import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.type.HugeType; public abstract class AbstractSerializer - implements GraphSerializer, SchemaSerializer { + implements GraphSerializer, SchemaSerializer { protected HugeConfig config; @@ -36,7 +36,7 @@ public AbstractSerializer() { } public AbstractSerializer(HugeConfig config) { - this.config = config; + this.config = config; } protected BackendEntry convertEntry(BackendEntry entry) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryScatterSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryScatterSerializer.java index 3e0c8f2a77..18b642a001 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryScatterSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryScatterSerializer.java @@ -22,12 +22,12 @@ import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.store.BackendEntry; import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; +import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; import org.apache.hugegraph.type.define.HugeKeys; -import org.apache.hugegraph.config.HugeConfig; public class BinaryScatterSerializer extends BinarySerializer { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java index 453125853a..515a6cfda3 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java @@ -24,17 +24,12 @@ import java.util.Map; import org.apache.commons.lang.NotImplementedException; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.page.PageState; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.util.*; import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.Condition.RangeConditions; import 
org.apache.hugegraph.backend.query.ConditionQuery; @@ -42,6 +37,8 @@ import org.apache.hugegraph.backend.query.IdRangeQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.serializer.BinaryBackendEntry.BinaryId; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.IndexLabel; @@ -55,6 +52,7 @@ import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; +import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.AggregateType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; @@ -66,7 +64,10 @@ import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.type.define.SerialEnum; import org.apache.hugegraph.type.define.WriteType; +import org.apache.hugegraph.util.Bytes; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.JsonUtil; +import org.apache.hugegraph.util.NumericUtil; import org.apache.hugegraph.util.StringEncoding; public class BinarySerializer extends AbstractSerializer { @@ -208,7 +209,7 @@ protected void parseProperty(Id pkeyId, BytesBuffer buffer, } else { if (!(value instanceof Collection)) { throw new BackendException( - "Invalid value of non-single property: %s", value); + "Invalid value of non-single property: %s", value); } owner.addProperty(pkey, value); } @@ -867,7 +868,9 @@ public BackendEntry parse(BackendEntry originEntry) { buffer.write(parsedEntry.id().asBytes()); buffer.write(bytes); parsedEntry = new BinaryBackendEntry(originEntry.type(), new BinaryId(buffer.bytes(), - BytesBuffer.wrap(buffer.bytes()).readEdgeId())); + BytesBuffer.wrap( + buffer.bytes()) + .readEdgeId())); for (BackendColumn col : originEntry.columns()) { parsedEntry.column(buffer.bytes(), col.value); @@ -913,9 +916,7 @@ protected static boolean indexIdLengthExceedLimit(Id id) { protected static boolean indexFieldValuesUnmatched(byte[] value, Object fieldValues) { if (value != null && value.length > 0 && fieldValues != null) { - if (!StringEncoding.decode(value).equals(fieldValues)) { - return true; - } + return !StringEncoding.decode(value).equals(fieldValues); } return false; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java index 47aa9d36dc..7cc15188d7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java @@ -18,7 +18,6 @@ package org.apache.hugegraph.backend.serializer; import java.io.OutputStream; -import java.nio.Buffer; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; @@ -29,13 +28,14 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.Id.IdType; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.serializer.BinaryBackendEntry.BinaryId; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Cardinality; import 
org.apache.hugegraph.type.define.DataType; -import org.apache.hugegraph.util.*; -import org.apache.hugegraph.backend.serializer.BinaryBackendEntry.BinaryId; import org.apache.hugegraph.util.Blob; +import org.apache.hugegraph.util.Bytes; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.KryoUtil; import org.apache.hugegraph.util.StringEncoding; @@ -170,7 +170,7 @@ private void require(int size) { "Capacity exceeds max buffer capacity: %s", MAX_BUFFER_CAPACITY); ByteBuffer newBuffer = ByteBuffer.allocate(newCapacity); - ((Buffer) this.buffer).flip(); + this.buffer.flip(); newBuffer.put(this.buffer); this.buffer = newBuffer; } @@ -344,7 +344,7 @@ public BytesBuffer writeStringWithEnding(String value) { * 0xFF is not a valid byte in UTF8 bytes */ assert !Bytes.contains(bytes, STRING_ENDING_BYTE_FF) : - "Invalid UTF8 bytes: " + value; + "Invalid UTF8 bytes: " + value; if (Bytes.contains(bytes, STRING_ENDING_BYTE)) { E.checkArgument(false, "Can't contains byte '0x00' in string: '%s'", @@ -421,7 +421,7 @@ public BytesBuffer writeVInt(int value) { this.write(0x80 | ((value >>> 14) & 0x7f)); } if (value > 0x7f || value < 0) { - this.write(0x80 | ((value >>> 7) & 0x7f)); + this.write(0x80 | ((value >>> 7) & 0x7f)); } this.write(value & 0x7f); @@ -485,7 +485,7 @@ public BytesBuffer writeVLong(long value) { this.write(0x80 | ((int) (value >>> 14) & 0x7f)); } if (value > 0x7fL || value < 0L) { - this.write(0x80 | ((int) (value >>> 7) & 0x7f)); + this.write(0x80 | ((int) (value >>> 7) & 0x7f)); } this.write((int) value & 0x7f); @@ -874,7 +874,7 @@ private long readNumber(byte b) { value |= this.readUInt16(); break; case 2: - value |= this.readUInt8() << 16 | this.readUInt16(); + value |= (long) this.readUInt8() << 16 | this.readUInt16(); break; case 3: value |= this.readUInt32(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java index d25d9564ae..289bc762f0 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java @@ -22,12 +22,12 @@ import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.structure.HugeEdgeProperty; import org.apache.hugegraph.structure.HugeIndex; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; +import org.apache.hugegraph.type.HugeType; public interface GraphSerializer { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/MergeIterator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/MergeIterator.java index a885626238..fa7cfe9542 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/MergeIterator.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/MergeIterator.java @@ -36,12 +36,12 @@ public MergeIterator(Iterator originIterator, List> iterators, BiFunction merger) { E.checkArgumentNotNull(originIterator, "The origin iterator of " + - "MergeIterator can't be 
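(Editor's aside on the writeVInt/writeVLong hunks above: the encoding emits an integer in 7-bit groups, most-significant group first, with the continuation bit 0x80 set on every group except the last. The standalone sketch below round-trips that scheme; the names and the simplistic decoder are illustrative, not BytesBuffer's API.)

```java
// Minimal sketch of the same variable-length int scheme as writeVInt above.
import java.io.ByteArrayOutputStream;

public class VIntSketch {

    static byte[] writeVInt(int value) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        if (value > 0x0fffffff || value < 0) {
            out.write(0x80 | ((value >>> 28) & 0x7f));
        }
        if (value > 0x1fffff || value < 0) {
            out.write(0x80 | ((value >>> 21) & 0x7f));
        }
        if (value > 0x3fff || value < 0) {
            out.write(0x80 | ((value >>> 14) & 0x7f));
        }
        if (value > 0x7f || value < 0) {
            out.write(0x80 | ((value >>> 7) & 0x7f));
        }
        out.write(value & 0x7f);
        return out.toByteArray();
    }

    // Simplistic decoder: accumulate 7 bits per byte (continuation bits masked)
    static int readVInt(byte[] bytes) {
        int value = 0;
        for (byte b : bytes) {
            value = (value << 7) | (b & 0x7f);
        }
        return value;
    }

    public static void main(String[] args) {
        byte[] encoded = writeVInt(300);        // 2 bytes: 0x82, 0x2c
        System.out.println(encoded.length);     // 2
        System.out.println(readVInt(encoded));  // 300
    }
}
```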
null"); + "MergeIterator can't be null"); E.checkArgument(iterators != null && !iterators.isEmpty(), "The iterators of MergeIterator can't be " + "null or empty"); E.checkArgumentNotNull(merger, "The merger function of " + - "MergeIterator can't be null"); + "MergeIterator can't be null"); this.originIterator = originIterator; this.headElements = new ArrayList<>(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/SerializerFactory.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/SerializerFactory.java index 39b52c7160..db5fe0a8cd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/SerializerFactory.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/SerializerFactory.java @@ -56,7 +56,7 @@ public static AbstractSerializer serializer(HugeConfig config, String name) { } } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) public static void register(String name, String classPath) { ClassLoader classLoader = SerializerFactory.class.getClassLoader(); Class clazz; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableBackendEntry.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableBackendEntry.java index c33cd97aa3..918a95b511 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableBackendEntry.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableBackendEntry.java @@ -27,7 +27,6 @@ import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.NotImplementedException; - import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.store.BackendEntry; import org.apache.hugegraph.type.HugeType; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableSerializer.java index 9d38260985..fbca2a9a19 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TableSerializer.java @@ -32,8 +32,6 @@ import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.IndexLabel; @@ -48,6 +46,7 @@ import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; +import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.AggregateType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; @@ -60,6 +59,7 @@ import org.apache.hugegraph.type.define.SerialEnum; import org.apache.hugegraph.type.define.WriteType; import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.JsonUtil; public abstract class TableSerializer extends AbstractSerializer { @@ -108,7 +108,7 @@ protected void 
parseProperty(Id key, Object colValue, HugeElement owner) { } else { if (!(value instanceof Collection)) { throw new BackendException( - "Invalid value of non-single property: %s", value); + "Invalid value of non-single property: %s", value); } owner.addProperty(pkey, value); } @@ -157,9 +157,10 @@ protected TableBackendEntry.Row formatEdge(HugeEdge edge) { /** * Parse an edge from a entry row - * @param row edge entry + * + * @param row edge entry * @param vertex null or the source vertex - * @param graph the HugeGraph context object + * @param graph the HugeGraph context object * @return the source vertex */ protected HugeEdge parseEdge(TableBackendEntry.Row row, @@ -560,8 +561,8 @@ public PropertyKey readPropertyKey(HugeGraph graph, AggregateType aggregateType = schemaEnum(entry, HugeKeys.AGGREGATE_TYPE, AggregateType.class); WriteType writeType = schemaEnumOrDefault( - entry, HugeKeys.WRITE_TYPE, - WriteType.class, WriteType.OLTP); + entry, HugeKeys.WRITE_TYPE, + WriteType.class, WriteType.OLTP); Object properties = schemaColumn(entry, HugeKeys.PROPERTIES); SchemaStatus status = schemaEnum(entry, HugeKeys.STATUS, SchemaStatus.class); @@ -697,9 +698,9 @@ private static T schemaEnum(TableBackendEntry entry, } private static T schemaEnumOrDefault( - TableBackendEntry entry, - HugeKeys key, Class clazz, - T defaultValue) { + TableBackendEntry entry, + HugeKeys key, Class clazz, + T defaultValue) { assert entry.type().isSchema(); Number value = entry.column(key); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextBackendEntry.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextBackendEntry.java index 871da99893..2dfdb2cba9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextBackendEntry.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextBackendEntry.java @@ -165,7 +165,7 @@ public boolean containsPrefix(String column) { } public boolean containsValue(String value) { - return this.columns.values().contains(value); + return this.columns.containsValue(value); } public void append(TextBackendEntry entry) { @@ -371,6 +371,7 @@ public boolean equals(Object obj) { return true; } + @Override public int hashCode() { return this.id().hashCode() ^ this.columns().hashCode(); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java index d99cf36e0a..b3e1737212 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java @@ -23,15 +23,10 @@ import java.util.List; import java.util.Map; +import org.apache.commons.lang.NotImplementedException; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.BackendException; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.config.HugeConfig; -import org.apache.commons.lang.NotImplementedException; - -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; @@ -42,6 +37,8 
@@ import org.apache.hugegraph.backend.query.IdPrefixQuery; import org.apache.hugegraph.backend.query.IdRangeQuery; import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.PropertyKey; @@ -55,6 +52,7 @@ import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; +import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.AggregateType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; @@ -66,13 +64,15 @@ import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.type.define.WriteType; import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.JsonUtil; + import com.google.common.collect.ImmutableMap; public class TextSerializer extends AbstractSerializer { private static final String VALUE_SPLITOR = TextBackendEntry.VALUE_SPLITOR; private static final String EDGE_NAME_ENDING = - ConditionQuery.INDEX_SYM_ENDING; + ConditionQuery.INDEX_SYM_ENDING; private static final String EDGE_OUT_TYPE = writeType(HugeType.EDGE_OUT); @@ -163,7 +163,7 @@ private void parseProperty(String colName, String colValue, } else { if (!(value instanceof Collection)) { throw new BackendException( - "Invalid value of non-single property: %s", colValue); + "Invalid value of non-single property: %s", colValue); } for (Object v : (Collection) value) { v = JsonUtil.castNumber(v, pkey.dataType().clazz()); @@ -313,7 +313,7 @@ public HugeVertex readVertex(HugeGraph graph, BackendEntry backendEntry) { HugeVertex vertex = new HugeVertex(graph, id, vertexLabel); String expiredTime = entry.column(this.formatSyspropName( - HugeKeys.EXPIRED_TIME)); + HugeKeys.EXPIRED_TIME)); // Expired time is null when backend entry is fake vertex with edges if (expiredTime != null) { vertex.expiredTime(readLong(expiredTime)); @@ -385,11 +385,11 @@ public HugeIndex readIndex(HugeGraph graph, ConditionQuery query, TextBackendEntry entry = this.convertEntry(backendEntry); String indexValues = entry.column( - formatSyspropName(HugeKeys.FIELD_VALUES)); + formatSyspropName(HugeKeys.FIELD_VALUES)); String indexLabelId = entry.column( - formatSyspropName(HugeKeys.INDEX_LABEL_ID)); + formatSyspropName(HugeKeys.INDEX_LABEL_ID)); String elemIds = entry.column( - formatSyspropName(HugeKeys.ELEMENT_IDS)); + formatSyspropName(HugeKeys.ELEMENT_IDS)); IndexLabel indexLabel = IndexLabel.label(graph, readId(indexLabelId)); HugeIndex index = new HugeIndex(graph, indexLabel); @@ -865,7 +865,7 @@ private static IdWithExpiredTime[] readElementIds(String str) { Map map = (Map) values[i]; idValue = map.get(HugeKeys.ID.string()); expiredTime = ((Number) map.get( - HugeKeys.EXPIRED_TIME.string())).longValue(); + HugeKeys.EXPIRED_TIME.string())).longValue(); } else { idValue = values[i]; expiredTime = 0L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java index 875027f1ef..53ea9b4e4f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java @@ -21,7 +21,7 @@ import org.apache.hugegraph.type.HugeType; public abstract class AbstractBackendStore - implements BackendStore { + implements BackendStore { // TODO: move SystemSchemaStore into backend like MetaStore private final SystemSchemaStore systemSchemaStore; @@ -43,7 +43,7 @@ public void registerMetaHandler(String name, MetaHandler handler) { @Override public String storedVersion() { throw new UnsupportedOperationException( - "AbstractBackendStore.storedVersion()"); + "AbstractBackendStore.storedVersion()"); } @Override @@ -68,8 +68,8 @@ public R metadata(HugeType type, String meta, Object[] args) { protected void checkOpened() throws ConnectionException { if (!this.opened()) { throw new ConnectionException( - "The '%s' store of %s has not been opened", - this.database(), this.provider().type()); + "The '%s' store of %s has not been opened", + this.database(), this.provider().type()); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStoreProvider.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStoreProvider.java index bfd9031ce5..906d795149 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStoreProvider.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStoreProvider.java @@ -21,11 +21,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Future; -import org.apache.hugegraph.backend.store.raft.StoreSnapshotFile; -import org.slf4j.Logger; - -import com.alipay.remoting.rpc.RpcServer; import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.store.raft.StoreSnapshotFile; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.event.EventHub; @@ -33,9 +30,12 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Events; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import com.alipay.remoting.rpc.RpcServer; public abstract class AbstractBackendStoreProvider - implements BackendStoreProvider { + implements BackendStoreProvider { private static final Logger LOG = Log.logger(AbstractBackendStoreProvider.class); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendEntryIterator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendEntryIterator.java index 4ffb3660a1..f13798dd63 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendEntryIterator.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendEntryIterator.java @@ -213,7 +213,6 @@ protected PageState pageState() { @Override public void close() throws Exception { - return; } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java index 7b9100f0ec..64c4dfb6c7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java @@ -46,7 +46,8 @@ 
public BackendMutation(int initialCapacity) { /** * Add data entry with an action to collection `updates` - * @param entry the backend entry + * + * @param entry the backend entry * @param action operate action on the entry */ @Watched(prefix = "mutation") @@ -72,11 +73,11 @@ public void put(BackendEntry entry, Action action) { /** * The optimized scenes include but are not limited to: * 1.If you want to delete an entry, the other mutations previously - * can be ignored. + * can be ignored. * 2.As similar to the No.1 item, If you want to insert an entry, - * the other mutations previously also can be ignored. + * the other mutations previously also can be ignored. * 3.If you append an entry and then eliminate it, the new action - * can override the old one. + * can override the old one. */ @Watched(prefix = "mutation") private void optimizeUpdates(BackendEntry entry, Action action) { @@ -85,7 +86,7 @@ private void optimizeUpdates(BackendEntry entry, Action action) { final List items = this.updates.get(entry.type(), id); assert items != null; boolean ignoreCurrent = false; - for (Iterator iter = items.iterator(); iter.hasNext();) { + for (Iterator iter = items.iterator(); iter.hasNext(); ) { BackendAction originItem = iter.next(); Action originAction = originItem.action(); switch (action) { @@ -105,9 +106,9 @@ private void optimizeUpdates(BackendEntry entry, Action action) { if (entry.type().isUniqueIndex() && originAction == Action.APPEND) { throw new IllegalArgumentException(String.format( - "Unique constraint conflict is found in" + - " transaction between %s and %s", - entry, originItem.entry())); + "Unique constraint conflict is found in" + + " transaction between %s and %s", + entry, originItem.entry())); } if (originAction == Action.INSERT || @@ -137,7 +138,7 @@ private void optimizeUpdates(BackendEntry entry, Action action) { break; default: throw new AssertionError(String.format( - "Unknown mutate action: %s", action)); + "Unknown mutate action: %s", action)); } } if (!ignoreCurrent) { @@ -146,8 +147,8 @@ private void optimizeUpdates(BackendEntry entry, Action action) { } private static HugeException incompatibleActionException( - Action newAction, - Action originAction) { + Action newAction, + Action originAction) { return new HugeException("The action '%s' is incompatible with " + "action '%s'", newAction, originAction); } @@ -156,11 +157,12 @@ private static HugeException incompatibleActionException( * Merges another mutation into this mutation. Ensures that all additions * and deletions are added to this mutation. Does not remove duplicates * if such exist - this needs to be ensured by the caller. 
+ * * @param mutation another mutation to be merged */ public void merge(BackendMutation mutation) { E.checkNotNull(mutation, "mutation"); - for (Iterator it = mutation.mutation(); it.hasNext();) { + for (Iterator it = mutation.mutation(); it.hasNext(); ) { BackendAction item = it.next(); this.add(item.entry(), item.action()); } @@ -172,6 +174,7 @@ public Set types() { /** * Get all mutations + * * @return mutations */ public Iterator mutation() { @@ -180,6 +183,7 @@ public Iterator mutation() { /** * Get mutations by type + * * @param type entry type * @return mutations */ @@ -189,8 +193,9 @@ public Iterator mutation(HugeType type) { /** * Get mutations by type and id + * * @param type entry type - * @param id entry id + * @param id entry id * @return mutations */ public List mutation(HugeType type, Id id) { @@ -199,7 +204,8 @@ public List mutation(HugeType type, Id id) { /** * Whether mutation contains entry and action - * @param entry entry + * + * @param entry entry * @param action action * @return true if exist, otherwise false */ @@ -218,12 +224,13 @@ public boolean contains(BackendEntry entry, Action action) { /** * Whether mutation contains type and action - * @param type type + * + * @param type type * @param action action * @return true if exist, otherwise false */ public boolean contains(HugeType type, Action action) { - for (Iterator i = this.updates.get(type); i.hasNext();) { + for (Iterator i = this.updates.get(type); i.hasNext(); ) { BackendAction entry = i.next(); if (entry.action() == action) { return true; @@ -234,6 +241,7 @@ public boolean contains(HugeType type, Action action) { /** * Whether this mutation is empty + * * @return true if empty, otherwise false */ public boolean isEmpty() { @@ -242,6 +250,7 @@ public boolean isEmpty() { /** * Get size of mutations + * * @return size */ public int size() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendProviderFactory.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendProviderFactory.java index 6b804b74fa..bcfb31e4fc 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendProviderFactory.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendProviderFactory.java @@ -20,8 +20,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.store.memory.InMemoryDBStoreProvider; @@ -29,6 +27,7 @@ import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public class BackendProviderFactory { @@ -83,7 +82,7 @@ private static BackendStoreProvider newProvider(HugeConfig config) { return instance; } - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) public static void register(String name, String classPath) { ClassLoader classLoader = BackendProviderFactory.class.getClassLoader(); Class clazz = null; @@ -96,7 +95,7 @@ public static void register(String name, String classPath) { // Check subclass boolean subclass = BackendStoreProvider.class.isAssignableFrom(clazz); BackendException.check(subclass, "Class '%s' is not a subclass of " + - "class BackendStoreProvider", classPath); + "class BackendStoreProvider", classPath); // Check exists 
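(Editor's aside on the optimizeUpdates() Javadoc above: it lists the scenes in which earlier queued actions on the same entry become redundant. The sketch below is a rough, simplified rendering of that idea; the types and method names are made up and are not the real BackendMutation API.)

```java
// Rough sketch: a later DELETE/INSERT supersedes earlier queued actions for
// the same entry, and an ELIMINATE overrides an APPEND queued just before it.
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;

public class MutationSketch {

    enum Action { INSERT, APPEND, ELIMINATE, DELETE }

    // One queue of pending actions per entry id
    private final Map<String, Deque<Action>> updates = new LinkedHashMap<>();

    void add(String entryId, Action action) {
        Deque<Action> pending =
                this.updates.computeIfAbsent(entryId, k -> new ArrayDeque<>());
        if (action == Action.DELETE || action == Action.INSERT) {
            // Scenes 1 & 2: everything queued before for this entry is ignored
            pending.clear();
        } else if (action == Action.ELIMINATE &&
                   Action.APPEND.equals(pending.peekLast())) {
            // Scene 3: the new ELIMINATE overrides the previous APPEND
            pending.pollLast();
        }
        pending.addLast(action);
    }

    public static void main(String[] args) {
        MutationSketch m = new MutationSketch();
        m.add("v1", Action.APPEND);
        m.add("v1", Action.DELETE);      // the earlier APPEND for v1 is dropped
        m.add("v2", Action.APPEND);
        m.add("v2", Action.ELIMINATE);   // overrides the APPEND for v2
        System.out.println(m.updates);   // {v1=[DELETE], v2=[ELIMINATE]}
    }
}
```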
BackendException.check(!providers.containsKey(name), diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendSessionPool.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendSessionPool.java index c7e3411bd5..521f44556f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendSessionPool.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendSessionPool.java @@ -23,11 +23,10 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; - import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public abstract class BackendSessionPool { @@ -47,7 +46,7 @@ public BackendSessionPool(HugeConfig config, String name) { this.sessionCount = new AtomicInteger(0); this.sessions = new ConcurrentHashMap<>(); this.reconnectDetectInterval = this.config.get( - CoreOptions.STORE_CONN_DETECT_INTERVAL); + CoreOptions.STORE_CONN_DETECT_INTERVAL); } public HugeConfig config() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStore.java index aebce4be63..a136aab1a3 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStore.java @@ -106,7 +106,7 @@ default Id nextId(HugeType type) { } E.checkState(counter != 0L, "Please check whether '%s' is OK", - this.provider().type()); + this.provider().type()); E.checkState(counter == expect, "'%s' is busy please try again", this.provider().type()); @@ -175,6 +175,11 @@ default void resumeSnapshot(String snapshotDir, boolean deleteSnapshot) { } enum TxState { - BEGIN, COMMITTING, COMMITT_FAIL, ROLLBACKING, ROLLBACK_FAIL, CLEAN + BEGIN, + COMMITTING, + COMMITT_FAIL, + ROLLBACKING, + ROLLBACK_FAIL, + CLEAN } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java index 382356be64..ed3f36f94f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java @@ -18,9 +18,8 @@ package org.apache.hugegraph.backend.store; import org.apache.hugegraph.config.HugeConfig; -import org.slf4j.Logger; - import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public class BackendStoreInfo { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java index 9df4bfdc37..a819451034 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java @@ -17,11 +17,12 @@ package org.apache.hugegraph.backend.store; -import com.alipay.remoting.rpc.RpcServer; import org.apache.hugegraph.config.HugeConfig; import 
org.apache.hugegraph.event.EventHub; import org.apache.hugegraph.event.EventListener; +import com.alipay.remoting.rpc.RpcServer; + public interface BackendStoreProvider { String SCHEMA_STORE = "m"; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendTable.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendTable.java index 8850272cfe..8715dc0e12 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendTable.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendTable.java @@ -275,8 +275,8 @@ public List splitEven(int count) { assert count > 1; byte[] each = align(new BigInteger(1, subtract(end, start)) - .divide(BigInteger.valueOf(count)) - .toByteArray(), + .divide(BigInteger.valueOf(count)) + .toByteArray(), length); byte[] offset = start; byte[] last = offset; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/TableDefine.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/TableDefine.java index f926119d0e..aae2793b92 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/TableDefine.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/TableDefine.java @@ -26,6 +26,7 @@ import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.util.InsertionOrderUtil; + import com.google.common.collect.ImmutableMap; public class TableDefine { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java index c3393ba4c7..825bbcff7d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java @@ -27,10 +27,6 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.serializer.TextBackendEntry; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.define.Action; -import org.slf4j.Logger; - import org.apache.hugegraph.backend.store.AbstractBackendStore; import org.apache.hugegraph.backend.store.BackendAction; import org.apache.hugegraph.backend.store.BackendEntry; @@ -39,7 +35,9 @@ import org.apache.hugegraph.backend.store.BackendSession; import org.apache.hugegraph.backend.store.BackendStoreProvider; import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; /** * NOTE: @@ -55,7 +53,7 @@ * 2.append/subtract edge-property */ public abstract class InMemoryDBStore - extends AbstractBackendStore { + extends AbstractBackendStore { private static final Logger LOG = Log.logger(InMemoryDBStore.class); @@ -127,7 +125,7 @@ public Number queryNumber(Query query) { @Override public void mutate(BackendMutation mutation) { - for (Iterator it = mutation.mutation(); it.hasNext();) { + for (Iterator it = mutation.mutation(); it.hasNext(); ) { this.mutate(it.next()); } } @@ -231,7 +229,7 @@ public void commitTx() { @Override public void rollbackTx() { throw new UnsupportedOperationException( - "Unsupported rollback operation by 
InMemoryDBStore"); + "Unsupported rollback operation by InMemoryDBStore"); } @Override @@ -342,19 +340,19 @@ public boolean isSchemaStore() { @Override public Id nextId(HugeType type) { throw new UnsupportedOperationException( - "InMemoryGraphStore.nextId()"); + "InMemoryGraphStore.nextId()"); } @Override public void increaseCounter(HugeType type, long num) { throw new UnsupportedOperationException( - "InMemoryGraphStore.increaseCounter()"); + "InMemoryGraphStore.increaseCounter()"); } @Override public long getCounter(HugeType type) { throw new UnsupportedOperationException( - "InMemoryGraphStore.getCounter()"); + "InMemoryGraphStore.getCounter()"); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStoreProvider.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStoreProvider.java index d3460e554f..aaf8e47001 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStoreProvider.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStoreProvider.java @@ -22,11 +22,11 @@ import org.apache.hugegraph.backend.store.AbstractBackendStoreProvider; import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.hugegraph.util.Events; import org.apache.hugegraph.backend.store.memory.InMemoryDBStore.InMemoryGraphStore; import org.apache.hugegraph.backend.store.memory.InMemoryDBStore.InMemorySchemaStore; import org.apache.hugegraph.backend.store.memory.InMemoryDBStore.InMemorySystemStore; import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.util.Events; public class InMemoryDBStoreProvider extends AbstractBackendStoreProvider { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTable.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTable.java index 1a6b3cef00..fb6ef7c021 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTable.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTable.java @@ -27,15 +27,6 @@ import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.serializer.TextBackendEntry; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.backend.store.BackendSession; -import org.apache.hugegraph.backend.store.BackendTable; -import org.apache.hugegraph.backend.store.Shard; -import org.apache.hugegraph.exception.NotSupportException; -import org.apache.hugegraph.type.HugeType; -import org.slf4j.Logger; - import org.apache.hugegraph.backend.query.Aggregate; import org.apache.hugegraph.backend.query.Aggregate.AggregateFunc; import org.apache.hugegraph.backend.query.Condition; @@ -44,14 +35,23 @@ import org.apache.hugegraph.backend.query.IdRangeQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.backend.serializer.TextBackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendSession; +import org.apache.hugegraph.backend.store.BackendTable; +import org.apache.hugegraph.backend.store.Shard; +import org.apache.hugegraph.exception.NotSupportException; +import 
org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; public class InMemoryDBTable extends BackendTable { + TextBackendEntry> { private static final Logger LOG = Log.logger(InMemoryDBTable.class); @@ -260,8 +260,8 @@ protected Map queryByIdRange(Id start, } protected Map queryByFilter( - Collection conditions, - Map entries) { + Collection conditions, + Map entries) { assert conditions.size() > 0; Map rs = new HashMap<>(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTables.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTables.java index e3ea17179e..0fdd36f09c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTables.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBTables.java @@ -32,23 +32,24 @@ import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.hugegraph.backend.serializer.TextBackendEntry; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; -import org.apache.hugegraph.backend.store.BackendSession; -import org.apache.hugegraph.structure.HugeIndex; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.Condition.RangeConditions; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.backend.serializer.TextBackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; +import org.apache.hugegraph.backend.store.BackendSession; import org.apache.hugegraph.iterator.ExtendableIterator; +import org.apache.hugegraph.structure.HugeIndex; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.NumericUtil; + import com.google.common.collect.ImmutableList; public class InMemoryDBTables { @@ -103,18 +104,18 @@ public void eliminate(BackendSession session, TextBackendEntry entry) { @Override protected Map queryById( - Collection ids, - Map entries) { + Collection ids, + Map entries) { // Query edge(in a vertex) by id return this.queryEdgeById(ids, false, entries); } @Override protected Map queryByIdPrefix( - Id start, - boolean inclusiveStart, - Id prefix, - Map entries) { + Id start, + boolean inclusiveStart, + Id prefix, + Map entries) { // Query edge(in a vertex) by v-id + column-name-prefix BackendEntry value = this.getEntryById(start, entries); if (value == null) { @@ -145,11 +146,11 @@ protected Map queryByIdPrefix( @Override protected Map queryByIdRange( - Id start, - boolean inclusiveStart, - Id end, - boolean inclusiveEnd, - Map entries) { + Id start, + boolean inclusiveStart, + Id end, + boolean 
inclusiveEnd, + Map entries) { BackendEntry value = this.getEntryById(start, entries); if (value == null) { return Collections.emptyMap(); @@ -178,8 +179,8 @@ protected Map queryByIdRange( } private Map queryEdgeById( - Collection ids, boolean prefix, - Map entries) { + Collection ids, boolean prefix, + Map entries) { assert ids.size() > 0; Map rs = InsertionOrderUtil.newMap(); @@ -195,7 +196,7 @@ private Map queryEdgeById( } else if ((!prefix && entry.contains(column)) || (prefix && entry.containsPrefix(column))) { BackendEntry edges = new TextBackendEntry( - HugeType.VERTEX, entry.id()); + HugeType.VERTEX, entry.id()); if (prefix) { // Some edges with specified prefix in the vertex edges.columns(entry.columnsWithPrefix(column)); @@ -229,8 +230,8 @@ private BackendEntry getEntryById(Id id, @Override protected Map queryByFilter( - Collection conditions, - Map entries) { + Collection conditions, + Map entries) { if (conditions.isEmpty()) { return entries; } @@ -405,7 +406,7 @@ public void delete(BackendSession session, TextBackendEntry entry) { E.checkState(indexLabel != null, "Expect index label"); Iterator> iter; - for (iter = this.store().entrySet().iterator(); iter.hasNext();) { + for (iter = this.store().entrySet().iterator(); iter.hasNext(); ) { Entry e = iter.next(); // Delete if prefix with index label if (e.getKey().asString().startsWith(indexLabel)) { @@ -535,7 +536,7 @@ public void delete(BackendSession session, TextBackendEntry entry) { SortedMap subStore; subStore = this.store().subMap(min, max); Iterator> iter; - for (iter = subStore.entrySet().iterator(); iter.hasNext();) { + for (iter = subStore.entrySet().iterator(); iter.hasNext(); ) { iter.next(); // Delete if prefix with index label iter.remove(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryMetrics.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryMetrics.java index 9625456850..9e7caa38ed 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryMetrics.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryMetrics.java @@ -20,6 +20,7 @@ import java.util.Map; import org.apache.hugegraph.backend.store.BackendMetrics; + import com.google.common.collect.ImmutableMap; public class InMemoryMetrics implements BackendMetrics { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStore.java index 1f92cdb0b4..bf89c6b17a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStore.java @@ -22,11 +22,6 @@ import java.util.List; import java.util.function.Function; -import org.slf4j.Logger; - -import com.alipay.sofa.jraft.Status; -import com.alipay.sofa.jraft.closure.ReadIndexClosure; -import com.alipay.sofa.jraft.util.BytesUtil; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; @@ -41,6 +36,11 @@ import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import com.alipay.sofa.jraft.Status; 
+import com.alipay.sofa.jraft.closure.ReadIndexClosure; +import com.alipay.sofa.jraft.util.BytesUtil; public class RaftBackendStore implements BackendStore { @@ -149,13 +149,13 @@ public void mutate(BackendMutation mutation) { @SuppressWarnings("unchecked") public Iterator query(Query query) { return (Iterator) - this.queryByRaft(query, o -> this.store.query(query)); + this.queryByRaft(query, o -> this.store.query(query)); } @Override public Number queryNumber(Query query) { return (Number) - this.queryByRaft(query, o -> this.store.queryNumber(query)); + this.queryByRaft(query, o -> this.store.queryNumber(query)); } @Override @@ -231,8 +231,8 @@ public void run(Status status, long index, byte[] reqCtx) { future.complete(status, () -> func.apply(query)); } else { future.failure(status, new BackendException( - "Failed to do raft read-index: %s", - status)); + "Failed to do raft read-index: %s", + status)); } } }; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStoreProvider.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStoreProvider.java index 32688da6c1..ecdff19bd5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStoreProvider.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftBackendStoreProvider.java @@ -20,9 +20,6 @@ import java.util.Set; import java.util.concurrent.Future; -import org.slf4j.Logger; - -import com.alipay.remoting.rpc.RpcServer; import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.store.BackendStore; @@ -35,6 +32,9 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Events; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import com.alipay.remoting.rpc.RpcServer; import com.google.common.collect.ImmutableSet; public class RaftBackendStoreProvider implements BackendStoreProvider { @@ -47,6 +47,7 @@ public class RaftBackendStoreProvider implements BackendStoreProvider { private RaftBackendStore schemaStore; private RaftBackendStore graphStore; private RaftBackendStore systemStore; + public RaftBackendStoreProvider(HugeGraphParams params, BackendStoreProvider provider) { this.provider = provider; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftClosure.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftClosure.java index 2f9479e138..bd4e45da42 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftClosure.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftClosure.java @@ -23,12 +23,12 @@ import java.util.concurrent.TimeoutException; import java.util.function.Supplier; +import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; import com.alipay.sofa.jraft.Closure; import com.alipay.sofa.jraft.Status; -import org.apache.hugegraph.backend.BackendException; -import org.apache.hugegraph.util.Log; public class RaftClosure implements Closure { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftContext.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftContext.java index 
852b56423d..a70cd3022d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftContext.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftContext.java @@ -184,11 +184,11 @@ public RaftNode node() { return this.raftNode; } - protected RpcServer rpcServer() { + RpcServer rpcServer() { return this.raftRpcServer; } - protected RpcForwarder rpcForwarder() { + RpcForwarder rpcForwarder() { return this.rpcForwarder; } @@ -216,7 +216,7 @@ public StoreType storeType(String store) { } } - protected RaftBackendStore[] stores() { + RaftBackendStore[] stores() { return this.stores; } @@ -233,13 +233,13 @@ public NodeOptions nodeOptions() throws IOException { NodeOptions nodeOptions = new NodeOptions(); nodeOptions.setEnableMetrics(false); nodeOptions.setRpcProcessorThreadPoolSize( - config.get(CoreOptions.RAFT_RPC_THREADS)); + config.get(CoreOptions.RAFT_RPC_THREADS)); nodeOptions.setRpcConnectTimeoutMs( - config.get(CoreOptions.RAFT_RPC_CONNECT_TIMEOUT)); + config.get(CoreOptions.RAFT_RPC_CONNECT_TIMEOUT)); nodeOptions.setRpcDefaultTimeout( - 1000 * config.get(CoreOptions.RAFT_RPC_TIMEOUT)); + 1000 * config.get(CoreOptions.RAFT_RPC_TIMEOUT)); nodeOptions.setRpcInstallSnapshotTimeout( - 1000 * config.get(CoreOptions.RAFT_INSTALL_SNAPSHOT_TIMEOUT)); + 1000 * config.get(CoreOptions.RAFT_INSTALL_SNAPSHOT_TIMEOUT)); int electionTimeout = config.get(CoreOptions.RAFT_ELECTION_TIMEOUT); nodeOptions.setElectionTimeoutMs(electionTimeout); @@ -269,27 +269,27 @@ public NodeOptions nodeOptions() throws IOException { */ raftOptions.setApplyBatch(config.get(CoreOptions.RAFT_APPLY_BATCH)); raftOptions.setDisruptorBufferSize( - config.get(CoreOptions.RAFT_QUEUE_SIZE)); + config.get(CoreOptions.RAFT_QUEUE_SIZE)); raftOptions.setDisruptorPublishEventWaitTimeoutSecs( - config.get(CoreOptions.RAFT_QUEUE_PUBLISH_TIMEOUT)); + config.get(CoreOptions.RAFT_QUEUE_PUBLISH_TIMEOUT)); raftOptions.setReplicatorPipeline( - config.get(CoreOptions.RAFT_REPLICATOR_PIPELINE)); + config.get(CoreOptions.RAFT_REPLICATOR_PIPELINE)); raftOptions.setOpenStatistics(false); raftOptions.setReadOnlyOptions( - ReadOnlyOption.valueOf( - config.get(CoreOptions.RAFT_READ_STRATEGY))); + ReadOnlyOption.valueOf( + config.get(CoreOptions.RAFT_READ_STRATEGY))); return nodeOptions; } - protected void clearCache() { + void clearCache() { // Just choose two representatives used to represent schema and graph this.notifyCache(Cache.ACTION_CLEAR, HugeType.VERTEX_LABEL, null); this.notifyCache(Cache.ACTION_CLEAR, HugeType.VERTEX, null); } - protected void updateCacheIfNeeded(BackendMutation mutation, - boolean forwarded) { + void updateCacheIfNeeded(BackendMutation mutation, + boolean forwarded) { // Update cache only when graph run in general mode if (this.graphMode() != GraphMode.NONE) { return; @@ -317,7 +317,7 @@ protected void updateCacheIfNeeded(BackendMutation mutation, } } - protected void notifyCache(String action, HugeType type, List ids) { + private void notifyCache(String action, HugeType type, List ids) { EventHub eventHub; if (type.isGraph()) { eventHub = this.params.graphEventHub(); @@ -373,18 +373,18 @@ private HugeConfig config() { @SuppressWarnings("unused") private RpcServer initAndStartRpcServer() { Integer lowWaterMark = this.config().get( - CoreOptions.RAFT_RPC_BUF_LOW_WATER_MARK); + CoreOptions.RAFT_RPC_BUF_LOW_WATER_MARK); System.setProperty("bolt.channel_write_buf_low_water_mark", String.valueOf(lowWaterMark)); Integer highWaterMark = 
this.config().get( - CoreOptions.RAFT_RPC_BUF_HIGH_WATER_MARK); + CoreOptions.RAFT_RPC_BUF_HIGH_WATER_MARK); System.setProperty("bolt.channel_write_buf_high_water_mark", String.valueOf(highWaterMark)); PeerId endpoint = this.endpoint(); NodeManager.getInstance().addAddress(endpoint.getEndpoint()); RpcServer rpcServer = RaftRpcServerFactory.createAndStartRaftRpcServer( - endpoint.getEndpoint()); + endpoint.getEndpoint()); LOG.info("Raft-RPC server is started successfully"); return rpcServer; } @@ -392,11 +392,11 @@ private RpcServer initAndStartRpcServer() { private RpcServer wrapRpcServer(com.alipay.remoting.rpc.RpcServer rpcServer) { // TODO: pass ServerOptions instead of CoreOptions, to share by graphs Integer lowWaterMark = this.config().get( - CoreOptions.RAFT_RPC_BUF_LOW_WATER_MARK); + CoreOptions.RAFT_RPC_BUF_LOW_WATER_MARK); System.setProperty("bolt.channel_write_buf_low_water_mark", String.valueOf(lowWaterMark)); Integer highWaterMark = this.config().get( - CoreOptions.RAFT_RPC_BUF_HIGH_WATER_MARK); + CoreOptions.RAFT_RPC_BUF_HIGH_WATER_MARK); System.setProperty("bolt.channel_write_buf_high_water_mark", String.valueOf(highWaterMark)); @@ -439,7 +439,7 @@ private ExecutorService createSnapshotExecutor(int coreThreads) { private ExecutorService createBackendExecutor(int threads) { String name = "store-backend-executor"; RejectedExecutionHandler handler = - new ThreadPoolExecutor.CallerRunsPolicy(); + new ThreadPoolExecutor.CallerRunsPolicy(); return newPool(threads, threads, name, handler); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftException.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftException.java index cce4db182d..85e6915bf8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftException.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftException.java @@ -45,7 +45,7 @@ public RaftException(Throwable cause) { public static final void check(boolean expression, String message, Object... 
args) - throws RaftException { + throws RaftException { if (!expression) { throw new RaftException(message, args); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftGroupManagerImpl.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftGroupManagerImpl.java index ee1e328708..0f231d734c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftGroupManagerImpl.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftGroupManagerImpl.java @@ -20,21 +20,19 @@ import java.util.List; import java.util.stream.Collectors; -import com.alipay.sofa.jraft.Node; -import com.alipay.sofa.jraft.Status; -import com.alipay.sofa.jraft.entity.PeerId; -import org.apache.hugegraph.backend.store.raft.rpc.RpcForwarder; import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.AddPeerRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.ListPeersRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.ListPeersResponse; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.RemovePeerRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.SetLeaderRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.SetLeaderResponse; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.AddPeerRequest; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.AddPeerResponse; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.RemovePeerRequest; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.RemovePeerResponse; - +import org.apache.hugegraph.backend.store.raft.rpc.RpcForwarder; import org.apache.hugegraph.util.E; + +import com.alipay.sofa.jraft.Node; +import com.alipay.sofa.jraft.Status; +import com.alipay.sofa.jraft.entity.PeerId; import com.google.protobuf.Message; public class RaftGroupManagerImpl implements RaftGroupManager { @@ -87,8 +85,8 @@ public String transferLeaderTo(String endpoint) { Status status = this.raftNode.node().transferLeadershipTo(peerId); if (!status.isOk()) { throw new BackendException( - "Failed to transfer leader to '%s', raft error: %s", - endpoint, status.getErrorMsg()); + "Failed to transfer leader to '%s', raft error: %s", + endpoint, status.getErrorMsg()); } return peerId.toString(); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftNode.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftNode.java index 6f703f5c53..a8e07b4114 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftNode.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftNode.java @@ -24,6 +24,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.util.LZ4Util; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; import com.alipay.sofa.jraft.Node; @@ -37,9 +40,6 @@ import com.alipay.sofa.jraft.option.NodeOptions; import com.alipay.sofa.jraft.rpc.RpcServer; import com.alipay.sofa.jraft.util.BytesUtil; -import org.apache.hugegraph.backend.BackendException; -import org.apache.hugegraph.util.LZ4Util; -import 
org.apache.hugegraph.util.Log; public final class RaftNode { @@ -69,11 +69,11 @@ public RaftNode(RaftContext context) { this.busyCounter = new AtomicInteger(); } - protected RaftContext context() { + private RaftContext context() { return this.context; } - protected Node node() { + Node node() { assert this.node != null; return this.node; } @@ -105,7 +105,7 @@ public void shutdown() { this.raftGroupService.join(); } catch (final InterruptedException e) { throw new RaftException( - "Interrupted while shutdown raftGroupService"); + "Interrupted while shutdown raftGroupService"); } } } @@ -147,7 +147,7 @@ public T submitAndWait(StoreCommand command, RaftStoreClosure future) { private void submitCommand(StoreCommand command, RaftStoreClosure future) { // Wait leader elected LeaderInfo leaderInfo = this.waitLeaderElected( - RaftContext.WAIT_LEADER_TIMEOUT); + RaftContext.WAIT_LEADER_TIMEOUT); // If myself is not leader, forward to the leader if (!leaderInfo.selfIsLeader) { this.context.rpcForwarder().forwardToLeader(leaderInfo.leaderId, @@ -171,7 +171,7 @@ private void submitCommand(StoreCommand command, RaftStoreClosure future) { this.node.apply(task); } - protected LeaderInfo waitLeaderElected(int timeout) { + LeaderInfo waitLeaderElected(int timeout) { String group = this.context.group(); LeaderInfo leaderInfo = this.leaderInfo.get(); if (leaderInfo.leaderId != null) { @@ -189,8 +189,8 @@ protected LeaderInfo waitLeaderElected(int timeout) { long consumedTime = System.currentTimeMillis() - beginTime; if (timeout > 0 && consumedTime >= timeout) { throw new BackendException( - "Waiting for raft group '%s' election timeout(%sms)", - group, consumedTime); + "Waiting for raft group '%s' election timeout(%sms)", + group, consumedTime); } leaderInfo = this.leaderInfo.get(); assert leaderInfo != null; @@ -199,7 +199,7 @@ protected LeaderInfo waitLeaderElected(int timeout) { return leaderInfo; } - protected void waitRaftLogSynced(int timeout) { + void waitRaftLogSynced(int timeout) { String group = this.context.group(); LOG.info("Waiting for raft group '{}' log synced", group); long beginTime = System.currentTimeMillis(); @@ -219,8 +219,8 @@ public void run(Status status, long index, byte[] reqCtx) { long consumedTime = System.currentTimeMillis() - beginTime; if (timeout > 0 && consumedTime >= timeout) { throw new BackendException( - "Waiting for raft group '%s' log synced timeout(%sms)", - group, consumedTime); + "Waiting for raft group '%s' log synced timeout(%sms)", + group, consumedTime); } } LOG.info("Waited for raft group '{}' log synced successfully", group); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftResult.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftResult.java index ad9c397027..74f8f8b485 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftResult.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/RaftResult.java @@ -19,9 +19,10 @@ import java.util.function.Supplier; -import com.alipay.sofa.jraft.Status; import org.apache.hugegraph.util.E; +import com.alipay.sofa.jraft.Status; + public final class RaftResult { private final Status status; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSerializer.java index 
8c6c4fc7d4..69937737da 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSerializer.java @@ -68,7 +68,7 @@ public static byte[] writeMutation(BackendMutation mutation) { // write mutation size buffer.writeVInt(mutation.size()); for (Iterator items = mutation.mutation(); - items.hasNext();) { + items.hasNext(); ) { BackendAction item = items.next(); // write Action buffer.write(item.action().code()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSnapshotFile.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSnapshotFile.java index f59899fb4d..dd9e6b8914 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSnapshotFile.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreSnapshotFile.java @@ -218,7 +218,7 @@ private String decompressSnapshot(SnapshotReader reader, archiveFile, parentPath, (end - begin) / 1000.0F); } catch (Throwable e) { throw new RaftException( - "Failed to decompress snapshot, zip=%s", e, archiveFile); + "Failed to decompress snapshot, zip=%s", e, archiveFile); } if (meta.hasChecksum()) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreStateMachine.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreStateMachine.java index 81cb1af09d..df54a8276d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreStateMachine.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/StoreStateMachine.java @@ -23,6 +23,16 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; +import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.serializer.BytesBuffer; +import org.apache.hugegraph.backend.store.BackendMutation; +import org.apache.hugegraph.backend.store.BackendStore; +import org.apache.hugegraph.backend.store.raft.RaftBackendStore.IncrCounter; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreAction; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreType; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.LZ4Util; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; import com.alipay.sofa.jraft.Closure; @@ -35,16 +45,6 @@ import com.alipay.sofa.jraft.error.RaftException; import com.alipay.sofa.jraft.storage.snapshot.SnapshotReader; import com.alipay.sofa.jraft.storage.snapshot.SnapshotWriter; -import org.apache.hugegraph.backend.BackendException; -import org.apache.hugegraph.backend.serializer.BytesBuffer; -import org.apache.hugegraph.backend.store.BackendMutation; -import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.hugegraph.backend.store.raft.RaftBackendStore.IncrCounter; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreAction; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreType; -import org.apache.hugegraph.util.E; -import org.apache.hugegraph.util.LZ4Util; -import org.apache.hugegraph.util.Log; public final class StoreStateMachine extends StateMachineAdapter { @@ -165,7 +165,7 @@ private Object applyCommand(StoreType type, 
StoreAction action, break; case COMMIT_TX: List mutations = StoreSerializer.readMutations( - buffer); + buffer); // RaftBackendStore doesn't write raft log for beginTx store.beginTx(); for (BackendMutation mutation : mutations) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/CompressStrategyManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/CompressStrategyManager.java index 640807c9d5..2e17c456d1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/CompressStrategyManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/CompressStrategyManager.java @@ -56,10 +56,10 @@ public static void init(final HugeConfig config) { // add parallel compress strategy if (compressStrategies[PARALLEL_STRATEGY] == null) { CompressStrategy compressStrategy = new ParallelCompressStrategy( - config.get(CoreOptions.RAFT_SNAPSHOT_COMPRESS_THREADS), - config.get(CoreOptions.RAFT_SNAPSHOT_DECOMPRESS_THREADS)); + config.get(CoreOptions.RAFT_SNAPSHOT_COMPRESS_THREADS), + config.get(CoreOptions.RAFT_SNAPSHOT_DECOMPRESS_THREADS)); CompressStrategyManager.addCompressStrategy( - CompressStrategyManager.PARALLEL_STRATEGY, compressStrategy); + CompressStrategyManager.PARALLEL_STRATEGY, compressStrategy); DEFAULT_STRATEGY = PARALLEL_STRATEGY; } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/ParallelCompressStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/ParallelCompressStrategy.java index 040bebb68c..09b2b3e9e6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/ParallelCompressStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/compress/ParallelCompressStrategy.java @@ -104,10 +104,10 @@ public void compressZip(String rootDir, String sourceDir, String outputZipFile, FileUtils.forceMkdir(zipFile.getParentFile()); ExecutorService compressExecutor = - newFixedPool(compressThreads, compressThreads, "raft-snapshot-compress-executor", - new ThreadPoolExecutor.CallerRunsPolicy()); + newFixedPool(compressThreads, compressThreads, "raft-snapshot-compress-executor", + new ThreadPoolExecutor.CallerRunsPolicy()); ZipArchiveScatterOutputStream scatterOutput = - new ZipArchiveScatterOutputStream(compressExecutor); + new ZipArchiveScatterOutputStream(compressExecutor); compressDirectoryToZipFile(rootFile, scatterOutput, sourceDir, ZipEntry.DEFLATED); try (FileOutputStream fos = new FileOutputStream(zipFile); @@ -127,9 +127,9 @@ public void decompressZip(String sourceZipFile, String outputDir, Checksum checksum) throws Throwable { LOG.info("Start to decompress snapshot in parallel mode"); ExecutorService decompressExecutor = - newFixedPool(decompressThreads, decompressThreads, - "raft-snapshot-decompress-executor", - new ThreadPoolExecutor.CallerRunsPolicy()); + newFixedPool(decompressThreads, decompressThreads, + "raft-snapshot-decompress-executor", + new ThreadPoolExecutor.CallerRunsPolicy()); // compute the checksum in a single thread Future checksumFuture = decompressExecutor.submit(() -> { computeZipFileChecksumValue(sourceZipFile, checksum); @@ -209,7 +209,7 @@ private void unZipFile(ZipFile zipFile, ZipArchiveEntry entry, try (InputStream is = 
zipFile.getInputStream(entry); BufferedInputStream fis = new BufferedInputStream(is); BufferedOutputStream bos = - new BufferedOutputStream(Files.newOutputStream(targetFile.toPath()))) { + new BufferedOutputStream(Files.newOutputStream(targetFile.toPath()))) { IOUtils.copy(fis, bos); } } @@ -219,7 +219,7 @@ private void unZipFile(ZipFile zipFile, ZipArchiveEntry entry, */ private void computeZipFileChecksumValue(String zipPath, Checksum checksum) throws Exception { try (BufferedInputStream bis = - new BufferedInputStream(Files.newInputStream(Paths.get(zipPath))); + new BufferedInputStream(Files.newInputStream(Paths.get(zipPath))); CheckedInputStream cis = new CheckedInputStream(bis, checksum); ZipArchiveInputStream zis = new ZipArchiveInputStream(cis)) { // checksum is calculated in the process diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/AddPeerProcessor.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/AddPeerProcessor.java index d1bdbf004a..275de3fc23 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/AddPeerProcessor.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/AddPeerProcessor.java @@ -17,19 +17,20 @@ package org.apache.hugegraph.backend.store.raft.rpc; -import com.alipay.sofa.jraft.rpc.RpcRequestClosure; -import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; -import com.google.protobuf.Message; import org.apache.hugegraph.backend.store.raft.RaftContext; import org.apache.hugegraph.backend.store.raft.RaftGroupManager; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.AddPeerRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.AddPeerResponse; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; import org.apache.hugegraph.util.Log; import org.slf4j.Logger; +import com.alipay.sofa.jraft.rpc.RpcRequestClosure; +import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; +import com.google.protobuf.Message; + public class AddPeerProcessor - extends RpcRequestProcessor { + extends RpcRequestProcessor { private static final Logger LOG = Log.logger(AddPeerProcessor.class); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/ListPeersProcessor.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/ListPeersProcessor.java index 807092899c..ebe96b7a74 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/ListPeersProcessor.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/ListPeersProcessor.java @@ -17,21 +17,21 @@ package org.apache.hugegraph.backend.store.raft.rpc; -import org.slf4j.Logger; - -import com.alipay.sofa.jraft.rpc.RpcRequestClosure; -import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; -import org.apache.hugegraph.backend.store.raft.RaftGroupManager; import org.apache.hugegraph.backend.store.raft.RaftContext; +import org.apache.hugegraph.backend.store.raft.RaftGroupManager; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.ListPeersRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.ListPeersResponse; import 
org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import com.alipay.sofa.jraft.rpc.RpcRequestClosure; +import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; import com.google.common.collect.ImmutableList; import com.google.protobuf.Message; public class ListPeersProcessor - extends RpcRequestProcessor { + extends RpcRequestProcessor { private static final Logger LOG = Log.logger(ListPeersProcessor.class); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RemovePeerProcessor.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RemovePeerProcessor.java index fd2ba2eac2..31d10f41d4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RemovePeerProcessor.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RemovePeerProcessor.java @@ -17,19 +17,20 @@ package org.apache.hugegraph.backend.store.raft.rpc; -import com.alipay.sofa.jraft.rpc.RpcRequestClosure; -import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; -import com.google.protobuf.Message; import org.apache.hugegraph.backend.store.raft.RaftContext; import org.apache.hugegraph.backend.store.raft.RaftGroupManager; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.RemovePeerRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.RemovePeerResponse; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; import org.apache.hugegraph.util.Log; import org.slf4j.Logger; +import com.alipay.sofa.jraft.rpc.RpcRequestClosure; +import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; +import com.google.protobuf.Message; + public class RemovePeerProcessor - extends RpcRequestProcessor { + extends RpcRequestProcessor { private static final Logger LOG = Log.logger(RemovePeerProcessor.class); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RpcForwarder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RpcForwarder.java index a9f9d8ee69..0944f5cc39 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RpcForwarder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/RpcForwarder.java @@ -19,8 +19,16 @@ import java.util.concurrent.ExecutionException; +import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.store.raft.RaftClosure; +import org.apache.hugegraph.backend.store.raft.RaftContext; import org.apache.hugegraph.backend.store.raft.RaftStoreClosure; import org.apache.hugegraph.backend.store.raft.StoreCommand; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreCommandRequest; +import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreCommandResponse; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; import com.alipay.sofa.jraft.Node; @@ -31,14 +39,6 @@ import com.alipay.sofa.jraft.rpc.RaftClientService; import com.alipay.sofa.jraft.rpc.RpcResponseClosure; import com.alipay.sofa.jraft.util.Endpoint; -import org.apache.hugegraph.backend.BackendException; -import org.apache.hugegraph.backend.store.raft.RaftClosure; -import 
org.apache.hugegraph.backend.store.raft.RaftContext; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreCommandRequest; -import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreCommandResponse; -import org.apache.hugegraph.util.E; -import org.apache.hugegraph.util.Log; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.Message; import com.google.protobuf.ZeroByteStringHelper; @@ -77,10 +77,13 @@ public void forwardToLeader(PeerId leaderId, StoreCommand command, public void setResponse(StoreCommandResponse response) { if (response.getStatus()) { LOG.debug("StoreCommandResponse status ok"); - // This code forwards the request to the Raft leader and considers the operation successful + // This code forwards the request to the Raft leader and considers the + // operation successful // if it's forwarded successfully. It returns a RaftClosure because the calling - // logic expects a RaftClosure result. Specifically, if the current instance is the Raft leader, - // it executes the corresponding logic locally and notifies the calling logic asynchronously + // logic expects a RaftClosure result. Specifically, if the current instance + // is the Raft leader, + // it executes the corresponding logic locally and notifies the calling logic + // asynchronously // via RaftClosure. Therefore, the result is returned as a RaftClosure here. RaftClosure supplierFuture = new RaftClosure<>(); supplierFuture.complete(Status.OK()); @@ -90,10 +93,10 @@ public void setResponse(StoreCommandResponse response) { Status status = new Status(RaftError.UNKNOWN, "fowared request failed"); BackendException e = new BackendException( - "Current node isn't leader, leader " + - "is [%s], failed to forward request " + - "to leader: %s", - leaderId, response.getMessage()); + "Current node isn't leader, leader " + + "is [%s], failed to forward request " + + "to leader: %s", + leaderId, response.getMessage()); future.failure(status, e); } } @@ -133,10 +136,10 @@ public void setResponse(T response) { Status status = new Status(RaftError.UNKNOWN, "fowared request failed"); BackendException e = new BackendException( - "Current node isn't leader, leader " + - "is [%s], failed to forward request " + - "to leader: %s", - leaderId, commonResponse.getMessage()); + "Current node isn't leader, leader " + + "is [%s], failed to forward request " + + "to leader: %s", + leaderId, commonResponse.getMessage()); future.failure(status, e); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/SetLeaderProcessor.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/SetLeaderProcessor.java index d9fa36323d..58589fe269 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/SetLeaderProcessor.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/SetLeaderProcessor.java @@ -22,15 +22,15 @@ import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.CommonResponse; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.SetLeaderRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.SetLeaderResponse; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; import com.alipay.sofa.jraft.rpc.RpcRequestClosure; import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; -import 
org.apache.hugegraph.util.Log; import com.google.protobuf.Message; public class SetLeaderProcessor - extends RpcRequestProcessor { + extends RpcRequestProcessor { private static final Logger LOG = Log.logger(SetLeaderProcessor.class); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/StoreCommandProcessor.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/StoreCommandProcessor.java index 8739c22691..e8863c5129 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/StoreCommandProcessor.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/raft/rpc/StoreCommandProcessor.java @@ -25,18 +25,18 @@ import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreCommandRequest; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreCommandResponse; import org.apache.hugegraph.backend.store.raft.rpc.RaftRequests.StoreType; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; import com.alipay.sofa.jraft.rpc.RpcRequestClosure; import com.alipay.sofa.jraft.rpc.RpcRequestProcessor; -import org.apache.hugegraph.util.Log; import com.google.protobuf.Message; public class StoreCommandProcessor - extends RpcRequestProcessor { + extends RpcRequestProcessor { private static final Logger LOG = Log.logger( - StoreCommandProcessor.class); + StoreCommandProcessor.class); private final RaftContext context; @@ -60,8 +60,8 @@ public Message processRequest(StoreCommandRequest request, LOG.warn("Failed to process StoreCommandRequest: {}", request.getAction(), e); StoreCommandResponse.Builder builder = StoreCommandResponse - .newBuilder() - .setStatus(false); + .newBuilder() + .setStatus(false); if (e.getMessage() != null) { builder.setMessage(e.getMessage()); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/RamTable.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/RamTable.java index 0729f4b80f..f71c44acd1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/RamTable.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/RamTable.java @@ -75,8 +75,8 @@ public final class RamTable { private static final int NULL = 0; private static final Condition BOTH_COND = Condition.or( - Condition.eq(HugeKeys.DIRECTION, Directions.OUT), - Condition.eq(HugeKeys.DIRECTION, Directions.IN)); + Condition.eq(HugeKeys.DIRECTION, Directions.OUT), + Condition.eq(HugeKeys.DIRECTION, Directions.IN)); private final HugeGraph graph; private final long verticesCapacity; @@ -145,7 +145,7 @@ private void loadFromFile(String fileName) throws Exception { File file = Paths.get(EXPORT_PATH, fileName).toFile(); if (!file.exists() || !file.isFile() || !file.canRead()) { throw new IllegalArgumentException(String.format( - "File '%s' does not existed or readable", fileName)); + "File '%s' does not existed or readable", fileName)); } try (FileInputStream fis = new FileInputStream(file); BufferedInputStream bis = new BufferedInputStream(fis); @@ -301,7 +301,7 @@ public Iterator query(Query query) { assert this.edgesSize() > 0; List cqs = ConditionQueryFlatten.flatten( - (ConditionQuery) query); + (ConditionQuery) query); if (cqs.size() == 1) { ConditionQuery cq = cqs.get(0); return this.query(cq); diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/AbstractTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/AbstractTransaction.java index 5611a43e11..43393252d2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/AbstractTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/AbstractTransaction.java @@ -20,17 +20,15 @@ import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.apache.hugegraph.backend.query.IdQuery; -import org.apache.hugegraph.backend.query.Query; -import org.apache.hugegraph.backend.query.QueryResults; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.Transaction; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.IdQuery; +import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.serializer.AbstractSerializer; import org.apache.hugegraph.backend.store.BackendEntry; import org.apache.hugegraph.backend.store.BackendEntryIterator; @@ -47,6 +45,8 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; import org.apache.hugegraph.util.collection.IdSet; +import org.slf4j.Logger; + import com.google.common.util.concurrent.RateLimiter; public abstract class AbstractTransaction implements Transaction { @@ -192,8 +192,8 @@ public BackendEntry get(HugeType type, Id id) { BackendEntry entry = this.query(type, id); if (entry == null) { throw new NotFoundException( - "Not found the %s entry with id '%s'", - type.readableName(), id); + "Not found the %s entry with id '%s'", + type.readableName(), id); } return entry; } @@ -387,7 +387,7 @@ public void commitOrRollback() { */ throw new BackendException("Failed to commit changes: %s(%s)", StringUtils.abbreviateMiddle( - e1.getMessage(), ".", 256), + e1.getMessage(), ".", 256), HugeException.rootCause(e1)); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java index 090d4268b5..c11b80fb37 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java @@ -32,15 +32,6 @@ import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.apache.hugegraph.backend.page.PageIds; -import org.apache.hugegraph.backend.page.PageState; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.hugegraph.task.EphemeralJobQueue; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.HugeGraphParams; @@ -51,7 +42,9 @@ import org.apache.hugegraph.backend.page.IdHolder.FixedIdHolder; import org.apache.hugegraph.backend.page.IdHolder.PagingIdHolder; import 
org.apache.hugegraph.backend.page.IdHolderList; +import org.apache.hugegraph.backend.page.PageIds; import org.apache.hugegraph.backend.page.PageInfo; +import org.apache.hugegraph.backend.page.PageState; import org.apache.hugegraph.backend.page.SortByCountIdHolderList; import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.Condition.RangeConditions; @@ -63,6 +56,8 @@ import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.serializer.AbstractSerializer; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendStore; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.exception.NoIndexException; @@ -81,6 +76,7 @@ import org.apache.hugegraph.structure.HugeIndex.IdWithExpiredTime; import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.task.EphemeralJobQueue; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Action; import org.apache.hugegraph.type.define.HugeKeys; @@ -91,6 +87,10 @@ import org.apache.hugegraph.util.LockUtil; import org.apache.hugegraph.util.LongEncoding; import org.apache.hugegraph.util.NumericUtil; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -111,7 +111,7 @@ public GraphIndexTransaction(HugeGraphParams graph, BackendStore store) { final HugeConfig conf = graph.configuration(); this.indexIntersectThresh = - conf.get(CoreOptions.QUERY_INDEX_INTERSECT_THRESHOLD); + conf.get(CoreOptions.QUERY_INDEX_INTERSECT_THRESHOLD); } protected void asyncRemoveIndexLeft(ConditionQuery query, @@ -186,9 +186,10 @@ private void updateVertexOlapIndex(HugeVertex vertex, boolean removed) { /** * Update index(user properties) of vertex or edge - * @param ilId the id of index label - * @param element the properties owner - * @param removed remove or add index + * + * @param ilId the id of index label + * @param element the properties owner + * @param removed remove or add index */ protected void updateIndex(Id ilId, HugeElement element, boolean removed) { SchemaTransaction schema = this.params().schemaTransaction(); @@ -242,7 +243,7 @@ protected void updateIndex(Id ilId, HugeElement element, boolean removed) { "Expect only one property in search index"); value = nnPropValues.get(0); Set words = - this.segmentWords(propertyValueToString(value)); + this.segmentWords(propertyValueToString(value)); for (String word : words) { this.updateIndex(indexLabel, word, element.id(), expiredTime, removed); @@ -263,7 +264,7 @@ protected void updateIndex(Id ilId, HugeElement element, boolean removed) { } else { for (int i = 0, n = nnPropValues.size(); i < n; i++) { List prefixValues = - nnPropValues.subList(0, i + 1); + nnPropValues.subList(0, i + 1); value = ConditionQuery.concatValues(prefixValues); this.updateIndex(indexLabel, value, element.id(), expiredTime, removed); @@ -282,15 +283,15 @@ protected void updateIndex(Id ilId, HugeElement element, boolean removed) { // TODO: add lock for updating unique index if (!removed && this.existUniqueValue(indexLabel, value, id)) { throw new IllegalArgumentException(String.format( - "Unique 
constraint %s conflict is found for %s", - indexLabel, element)); + "Unique constraint %s conflict is found for %s", + indexLabel, element)); } this.updateIndex(indexLabel, value, element.id(), expiredTime, removed); break; default: throw new AssertionError(String.format( - "Unknown index type '%s'", indexLabel.indexType())); + "Unknown index type '%s'", indexLabel.indexType())); } } @@ -355,8 +356,9 @@ private boolean existUniqueValueInStore(IndexLabel indexLabel, Object value) { * Single index, an index involving only one column. * Joint indexes, join of single indexes, composite indexes or mixed * of single indexes and composite indexes. + * * @param query original condition query - * @return converted id query + * @return converted id query */ @Watched(prefix = "index") public IdHolderList queryIndex(ConditionQuery query) { @@ -453,9 +455,9 @@ private IdHolderList queryByUserprop(ConditionQuery query) { PropertyKey propertyKey = this.graph().propertyKey(pkId); if (propertyKey.olap()) { throw new NotAllowException( - "Not allowed to query by olap property key '%s'" + - " when graph-read-mode is '%s'", - propertyKey, this.graph().readMode()); + "Not allowed to query by olap property key '%s'" + + " when graph-read-mode is '%s'", + propertyKey, this.graph().readMode()); } } } @@ -581,7 +583,7 @@ private IdHolder doJointIndex(IndexQueries queries) { } assert this.indexIntersectThresh > 0; // default value is 1000 Set ids = ((BatchIdHolder) holder).peekNext( - this.indexIntersectThresh).ids(); + this.indexIntersectThresh).ids(); if (ids.size() >= this.indexIntersectThresh) { // Transform into filtering filtering = true; @@ -619,7 +621,7 @@ private void storeSelectedIndexField(IndexLabel indexLabel, } ConditionQuery originConditionQuery = - query.originConditionQuery(); + query.originConditionQuery(); if (originConditionQuery != null) { originConditionQuery.selectedIndexField(indexLabel.indexField()); } @@ -743,8 +745,8 @@ private Set collectMatchedIndexes(ConditionQuery query) { schemaLabel = schema.getEdgeLabel(label); } else { throw new AssertionError(String.format( - "Unsupported index query type: %s", - query.resultType())); + "Unsupported index query type: %s", + query.resultType())); } schemaLabels = ImmutableList.of(schemaLabel); } else { @@ -755,8 +757,8 @@ private Set collectMatchedIndexes(ConditionQuery query) { schemaLabels = schema.getEdgeLabels(); } else { throw new AssertionError(String.format( - "Unsupported index query type: %s", - query.resultType())); + "Unsupported index query type: %s", + query.resultType())); } } @@ -773,8 +775,9 @@ private Set collectMatchedIndexes(ConditionQuery query) { /** * Collect matched IndexLabel(s) in a SchemaLabel for a query + * * @param schemaLabel find indexLabels of this schemaLabel - * @param query conditions container + * @param query conditions container * @return MatchedLabel object contains schemaLabel and matched indexLabels */ @Watched(prefix = "index") @@ -917,8 +920,8 @@ private void removeExpiredIndexIfNeeded(HugeIndex index, } private static Set matchSingleOrCompositeIndex( - ConditionQuery query, - Set indexLabels) { + ConditionQuery query, + Set indexLabels) { if (query.hasNeqCondition()) { return ImmutableSet.of(); } @@ -958,8 +961,8 @@ private static Set matchSingleOrCompositeIndex( * property-keys in query */ private static Set matchJointIndexes( - ConditionQuery query, - Set indexLabels) { + ConditionQuery query, + Set indexLabels) { if (query.hasNeqCondition()) { return ImmutableSet.of(); } @@ -1015,8 +1018,8 @@ private 
static Set matchJointIndexes( } private static Set matchRangeOrSearchIndexLabels( - ConditionQuery query, - Set indexLabels) { + ConditionQuery query, + Set indexLabels) { Set matchedIndexLabels = InsertionOrderUtil.newSet(); for (Relation relation : query.userpropRelations()) { if (!relation.relation().isRangeType() && @@ -1098,11 +1101,12 @@ private static IndexQueries buildJointIndexesQueries(ConditionQuery query, * Traverse C(m, n) combinations of a list to find first matched * result combination and call back with the result. * TODO: move this method to common module. - * @param all list to contain all items for combination - * @param m m of C(m, n) - * @param n n of C(m, n) + * + * @param all list to contain all items for combination + * @param m m of C(m, n) + * @param n n of C(m, n) * @param current current position in list - * @param result list to contains selected items + * @param result list to contains selected items * @return true if matched items combination else false */ private static boolean cmn(List all, int m, int n, @@ -1140,10 +1144,7 @@ private static boolean cmn(List all, int m, int n, } // Not select current item, continue to select C(m-1, n) result.remove(index); - if (cmn(all, m - 1, n, current, result, callback)) { - return true; - } - return false; + return cmn(all, m - 1, n, current, result, callback); } private static boolean shouldRecordIndexValue(ConditionQuery query, @@ -1154,8 +1155,8 @@ private static boolean shouldRecordIndexValue(ConditionQuery query, } private static IndexQueries constructJointSecondaryQueries( - ConditionQuery query, - List ils) { + ConditionQuery query, + List ils) { Set indexLabels = InsertionOrderUtil.newSet(); indexLabels.addAll(ils); indexLabels = matchJointIndexes(query, indexLabels); @@ -1241,8 +1242,8 @@ private static ConditionQuery constructQuery(ConditionQuery query, case RANGE_DOUBLE: if (query.userpropConditions().size() > 2) { throw new HugeException( - "Range query has two conditions at most, " + - "but got: %s", query.userpropConditions()); + "Range query has two conditions at most, " + + "but got: %s", query.userpropConditions()); } // Replace the query key with PROPERTY_VALUES, set number value indexQuery = new ConditionQuery(indexType.type(), query); @@ -1252,8 +1253,8 @@ private static ConditionQuery constructQuery(ConditionQuery query, Relation r = (Relation) condition; Number value = NumericUtil.convertToNumber(r.value()); Relation sys = new Condition.SyspropRelation( - HugeKeys.FIELD_VALUES, - r.relation(), value); + HugeKeys.FIELD_VALUES, + r.relation(), value); condition = condition.replace(r, sys); indexQuery.query(condition); } @@ -1263,13 +1264,13 @@ private static ConditionQuery constructQuery(ConditionQuery query, indexQuery = new ConditionQuery(type, query); indexQuery.eq(HugeKeys.INDEX_LABEL_ID, indexLabel.id()); List conditions = constructShardConditions( - query, indexLabel.indexFields(), - HugeKeys.FIELD_VALUES); + query, indexLabel.indexFields(), + HugeKeys.FIELD_VALUES); indexQuery.query(conditions); break; default: throw new AssertionError(String.format( - "Unknown index type '%s'", indexType)); + "Unknown index type '%s'", indexType)); } /* @@ -1287,9 +1288,9 @@ private static ConditionQuery constructQuery(ConditionQuery query, } protected static List constructShardConditions( - ConditionQuery query, - List fields, - HugeKeys key) { + ConditionQuery query, + List fields, + HugeKeys key) { List conditions = new ArrayList<>(2); boolean hasRange = false; int processedCondCount = 0; @@ -1380,9 
+1381,9 @@ protected static List constructShardConditions( } private static Relation shardFieldValuesCondition(HugeKeys key, - List prefixes, - Object number, - RelationType type) { + List prefixes, + Object number, + RelationType type) { List values = new ArrayList<>(prefixes); String num = LongEncoding.encodeNumber(number); if (type == RelationType.LTE) { @@ -1488,9 +1489,9 @@ private static NoIndexException noIndexException(HugeGraph graph, ConditionQuery query, Id label) { String name = label == null ? "any label" : String.format("label '%s'", - query.resultType().isVertex() ? - graph.vertexLabel(label).name() : - graph.edgeLabel(label).name()); + query.resultType().isVertex() ? + graph.vertexLabel(label).name() : + graph.edgeLabel(label).name()); List mismatched = new ArrayList<>(); if (query.hasSecondaryCondition()) { mismatched.add("secondary"); @@ -1619,7 +1620,7 @@ public boolean equals(Object other) { } private static class IndexQueries - extends HashMap { + extends HashMap { private static final long serialVersionUID = 1400326138090922676L; private static final IndexQueries EMPTY = new IndexQueries(null); @@ -1664,7 +1665,7 @@ public Query rootQuery() { } public Query asJointQuery() { - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) Collection queries = (Collection) this.values(); return new JointQuery(this.rootQuery().resultType(), this.parentQuery, queries); @@ -1713,7 +1714,7 @@ private static Query parent(Collection queries) { } public static class RemoveLeftIndexJob extends EphemeralJob - implements EphemeralJobQueue.Reduce { + implements EphemeralJobQueue.Reduce { private static final String REMOVE_LEFT_INDEX = "remove_left_index"; @@ -1770,7 +1771,7 @@ protected long removeIndexLeft(ConditionQuery query, long rCount = 0; long sCount = 0; - for (ConditionQuery cq: ConditionQueryFlatten.flatten(query)) { + for (ConditionQuery cq : ConditionQueryFlatten.flatten(query)) { // Process range index rCount += this.processRangeIndexLeft(cq, element); // Process secondary index or search index @@ -1808,7 +1809,7 @@ private long processRangeIndexLeft(ConditionQuery query, private IndexLabel findMatchedIndexLabel(ConditionQuery query, ConditionQuery.LeftIndex - leftIndex) { + leftIndex) { Set matchedIndexes = this.tx.collectMatchedIndexes(query); for (MatchedIndex index : matchedIndexes) { for (IndexLabel label : index.indexLabels()) { @@ -1837,16 +1838,16 @@ private long processSecondaryOrSearchIndexLeft(ConditionQuery query, .map(PropertyKey::id) .collect(Collectors.toSet()); Collection incorrectIndexFields = CollectionUtil.intersect( - il.indexFields(), - incorrectPkIds); + il.indexFields(), + incorrectPkIds); if (incorrectIndexFields.isEmpty()) { continue; } // Skip if search index is not wrong if (il.indexType().isSearch()) { Id field = il.indexField(); - String cond = deletion.getPropertyValue(field); - String actual = element.getPropertyValue(field); + String cond = deletion.getPropertyValue(field); + String actual = element.getPropertyValue(field); if (this.tx.matchSearchIndexWords(actual, cond)) { /* * If query by two search index, one is correct but @@ -1878,8 +1879,8 @@ private long processSecondaryOrSearchIndexLeft(ConditionQuery query, } private HugeElement constructErrorElem( - ConditionQuery query, HugeElement element, - Map incorrectPKs) { + ConditionQuery query, HugeElement element, + Map incorrectPKs) { HugeElement errorElem = element.copyAsFresh(); Set propKeys = query.userpropKeys(); for (Id key : propKeys) { diff 
--git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java index 01ccc0b24e..26d22e4f4d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java @@ -144,19 +144,19 @@ public GraphTransaction(HugeGraphParams graph, BackendStore store) { final HugeConfig conf = graph.configuration(); this.checkCustomVertexExist = - conf.get(CoreOptions.VERTEX_CHECK_CUSTOMIZED_ID_EXIST); + conf.get(CoreOptions.VERTEX_CHECK_CUSTOMIZED_ID_EXIST); this.checkAdjacentVertexExist = - conf.get(CoreOptions.VERTEX_ADJACENT_VERTEX_EXIST); + conf.get(CoreOptions.VERTEX_ADJACENT_VERTEX_EXIST); this.lazyLoadAdjacentVertex = - conf.get(CoreOptions.VERTEX_ADJACENT_VERTEX_LAZY); + conf.get(CoreOptions.VERTEX_ADJACENT_VERTEX_LAZY); this.removeLeftIndexOnOverwrite = - conf.get(CoreOptions.VERTEX_REMOVE_LEFT_INDEX); + conf.get(CoreOptions.VERTEX_REMOVE_LEFT_INDEX); this.commitPartOfAdjacentEdges = - conf.get(CoreOptions.VERTEX_PART_EDGE_COMMIT_SIZE); + conf.get(CoreOptions.VERTEX_PART_EDGE_COMMIT_SIZE); this.ignoreInvalidEntry = - conf.get(CoreOptions.QUERY_IGNORE_INVALID_DATA); + conf.get(CoreOptions.QUERY_IGNORE_INVALID_DATA); this.optimizeAggrByIndex = - conf.get(CoreOptions.QUERY_OPTIMIZE_AGGR_BY_INDEX); + conf.get(CoreOptions.QUERY_OPTIMIZE_AGGR_BY_INDEX); this.batchSize = conf.get(CoreOptions.QUERY_BATCH_SIZE); this.pageSize = conf.get(CoreOptions.QUERY_PAGE_SIZE); @@ -444,7 +444,7 @@ protected void prepareUpdates(Set> addedProps, // Eliminate the property(OUT and IN owner edge) this.doEliminate(this.serializer.writeEdgeProperty(prop)); this.doEliminate(this.serializer.writeEdgeProperty( - prop.switchEdgeOwner())); + prop.switchEdgeOwner())); } else { // Override edge(it will be in addedEdges & updatedEdges) this.addEdge(prop.element()); @@ -473,7 +473,7 @@ protected void prepareUpdates(Set> addedProps, // Append new property(OUT and IN owner edge) this.doAppend(this.serializer.writeEdgeProperty(prop)); this.doAppend(this.serializer.writeEdgeProperty( - prop.switchEdgeOwner())); + prop.switchEdgeOwner())); } else { // Override edge (it will be in addedEdges & updatedEdges) this.addEdge(prop.element()); @@ -535,7 +535,7 @@ public QueryResults query(Query query) { QueryList queries = this.optimizeQueries(query, super::query); LOG.debug("{}", queries); return queries.empty() ? 
QueryResults.empty() : - queries.fetch(this.pageSize); + queries.fetch(this.pageSize); } @Override @@ -776,7 +776,7 @@ protected Iterator queryVerticesByIds(Object[] vertexIds, if (vertex == null) { if (checkMustExist) { throw new NotFoundException( - "Vertex '%s' does not exist", id); + "Vertex '%s' does not exist", id); } else if (adjacentVertex) { assert !checkMustExist; // Return undefined if adjacentVertex but !checkMustExist @@ -1018,9 +1018,7 @@ protected Iterator queryEdgesFromBackend(Query query) { if (vertex == null) { return null; } - if (query.idsSize() == 1) { - assert vertex.getEdges().size() == 1; - } + assert query.idsSize() != 1 || vertex.getEdges().size() == 1; /* * Copy to avoid ConcurrentModificationException when removing edge * because HugeEdge.remove() will update edges in owner vertex @@ -1218,8 +1216,8 @@ public static ConditionQuery constructEdgesQuery(Id sourceVertex, // Edge direction if (direction == Directions.BOTH) { query.query(Condition.or( - Condition.eq(HugeKeys.DIRECTION, Directions.OUT), - Condition.eq(HugeKeys.DIRECTION, Directions.IN))); + Condition.eq(HugeKeys.DIRECTION, Directions.OUT), + Condition.eq(HugeKeys.DIRECTION, Directions.IN))); } else { assert direction == Directions.OUT || direction == Directions.IN; query.eq(HugeKeys.DIRECTION, direction); @@ -1380,8 +1378,8 @@ private static void verifyEdgesConditionQuery(ConditionQuery query) { if (matched != total) { throw new HugeException( - "Not supported querying edges by %s, expect %s", - query.conditions(), EdgeId.KEYS[count]); + "Not supported querying edges by %s, expect %s", + query.conditions(), EdgeId.KEYS[count]); } } @@ -1396,7 +1394,7 @@ private QueryList optimizeQueries(Query query, boolean supportIn = this.storeFeatures().supportsQueryWithInCondition(); for (ConditionQuery cq : ConditionQueryFlatten.flatten( - (ConditionQuery) query, supportIn)) { + (ConditionQuery) query, supportIn)) { // Optimize by sysprop Query q = this.optimizeQuery(cq); /* @@ -1416,7 +1414,7 @@ private QueryList optimizeQueries(Query query, private Query optimizeQuery(ConditionQuery query) { if (query.idsSize() > 0) { throw new HugeException( - "Not supported querying by id and conditions: %s", query); + "Not supported querying by id and conditions: %s", query); } Id label = query.condition(HugeKeys.LABEL); @@ -1458,7 +1456,7 @@ private Query optimizeQuery(ConditionQuery query) { // Serialize sort-values List keys = this.graph().edgeLabel(label).sortKeys(); List conditions = - GraphIndexTransaction.constructShardConditions( + GraphIndexTransaction.constructShardConditions( query, keys, HugeKeys.SORT_VALUES); query.query(conditions); /* @@ -1606,15 +1604,15 @@ private void checkNonnullProperty(HugeVertex vertex) { // Check whether passed all non-null property @SuppressWarnings("unchecked") Collection nonNullKeys = CollectionUtils.subtract( - vertexLabel.properties(), - vertexLabel.nullableKeys()); + vertexLabel.properties(), + vertexLabel.nullableKeys()); if (!keys.containsAll(nonNullKeys)) { @SuppressWarnings("unchecked") Collection missed = CollectionUtils.subtract(nonNullKeys, keys); HugeGraph graph = this.graph(); E.checkArgument(false, "All non-null property keys %s of " + - "vertex label '%s' must be set, missed keys %s", + "vertex label '%s' must be set, missed keys %s", graph.mapPkId2Name(nonNullKeys), vertexLabel.name(), graph.mapPkId2Name(missed)); } @@ -1641,11 +1639,11 @@ private void checkVertexExistIfCustomizedId(Map vertices) { HugeVertex newVertex = vertices.get(existedVertex.id()); if 
(!existedVertex.label().equals(newVertex.label())) { throw new HugeException( - "The newly added vertex with id:'%s' label:'%s' " + - "is not allowed to insert, because already exist " + - "a vertex with same id and different label:'%s'", - newVertex.id(), newVertex.label(), - existedVertex.label()); + "The newly added vertex with id:'%s' label:'%s' " + + "is not allowed to insert, because already exist " + + "a vertex with same id and different label:'%s'", + newVertex.id(), newVertex.label(), + existedVertex.label()); } } finally { CloseableIterator.closeIterator(results); @@ -1709,8 +1707,8 @@ private void removeLeftIndexIfNeeded(Map vertices) { } private Iterator filterUnmatchedRecords( - Iterator results, - Query query) { + Iterator results, + Query query) { // Filter unused or incorrect records return new FilterIterator<>(results, elem -> { // TODO: Left vertex/edge should to be auto removed via async task @@ -1729,12 +1727,9 @@ private Iterator filterUnmatchedRecords( return false; } // Process results that query from left index or primary-key - if (query.resultType().isVertex() == elem.type().isVertex() && - !rightResultFromIndexQuery(query, elem)) { - // Only index query will come here - return false; - } - return true; + // Only index query will come here + return query.resultType().isVertex() != elem.type().isVertex() || + rightResultFromIndexQuery(query, elem); }); } @@ -1786,7 +1781,7 @@ private boolean rightResultFromIndexQuery(Query query, HugeElement elem) { } private Iterator filterExpiredResultFromBackend( - Query query, Iterator results) { + Query query, Iterator results) { if (this.store().features().supportsTtl() || query.showExpired()) { return results; } @@ -1872,12 +1867,12 @@ private Iterator joinTxEdges(Query query, Iterator edges, } private Iterator joinTxRecords( - Query query, - Iterator records, - BiFunction matchFunc, - Map addedTxRecords, - Map removedTxRecords, - Map updatedTxRecords) { + Query query, + Iterator records, + BiFunction matchFunc, + Map addedTxRecords, + Map removedTxRecords, + Map updatedTxRecords) { this.checkOwnerThread(); // Return the origin results if there is no change in tx if (addedTxRecords.isEmpty() && @@ -1923,16 +1918,16 @@ private Iterator joinTxRecords( private void checkTxVerticesCapacity() throws LimitExceedException { if (this.verticesInTxSize() >= this.verticesCapacity) { throw new LimitExceedException( - "Vertices size has reached tx capacity %d", - this.verticesCapacity); + "Vertices size has reached tx capacity %d", + this.verticesCapacity); } } private void checkTxEdgesCapacity() throws LimitExceedException { if (this.edgesInTxSize() >= this.edgesCapacity) { throw new LimitExceedException( - "Edges size has reached tx capacity %d", - this.edgesCapacity); + "Edges size has reached tx capacity %d", + this.edgesCapacity); } } @@ -2075,7 +2070,7 @@ private void traverseByLabel(SchemaLabel label, HugeType type = label.type() == HugeType.VERTEX_LABEL ? HugeType.VERTEX : HugeType.EDGE; Query query = label.enableLabelIndex() ? 
new ConditionQuery(type) : - new Query(type); + new Query(type); query.capacity(Query.NO_CAPACITY); query.limit(Query.NO_LIMIT); if (this.store().features().supportsQueryByPage()) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IndexableTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IndexableTransaction.java index e3ce7fd243..7c481b05c2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IndexableTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IndexableTransaction.java @@ -17,10 +17,10 @@ package org.apache.hugegraph.backend.tx; -import org.apache.hugegraph.backend.store.BackendMutation; -import org.apache.hugegraph.backend.store.BackendStore; import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.store.BackendMutation; +import org.apache.hugegraph.backend.store.BackendStore; public abstract class IndexableTransaction extends AbstractTransaction { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaIndexTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaIndexTransaction.java index 54443fa0db..1fbade3190 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaIndexTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaIndexTransaction.java @@ -19,15 +19,13 @@ import java.util.Iterator; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.store.BackendEntry; import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - -import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.SchemaElement; @@ -35,6 +33,7 @@ import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; public class SchemaIndexTransaction extends AbstractTransaction { @@ -86,7 +85,7 @@ private QueryResults queryByName(ConditionQuery query) { IndexLabel il = IndexLabel.label(query.resultType()); String name = query.condition(HugeKeys.NAME); E.checkState(name != null, "The name in condition can't be null " + - "when querying schema by name"); + "when querying schema by name"); ConditionQuery indexQuery; indexQuery = new ConditionQuery(HugeType.SECONDARY_INDEX, query); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java index 6d27ff39de..1a42cf705a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java @@ -23,21 +23,18 @@ import java.util.Set; import java.util.function.Consumer; -import 
org.apache.hugegraph.backend.query.ConditionQuery; -import org.apache.hugegraph.backend.query.Query; -import org.apache.hugegraph.backend.query.QueryResults; -import org.apache.hugegraph.backend.store.BackendEntry; -import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.hugegraph.backend.store.SystemSchemaStore; -import org.apache.tinkerpop.gremlin.structure.Graph; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.LocalCounter; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.query.ConditionQuery; +import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendStore; +import org.apache.hugegraph.backend.store.SystemSchemaStore; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.exception.NotAllowException; import org.apache.hugegraph.job.JobBuilder; @@ -66,6 +63,9 @@ import org.apache.hugegraph.util.DateUtil; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.LockUtil; +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; + import com.google.common.collect.ImmutableSet; public class SchemaTransaction extends IndexableTransaction { @@ -160,9 +160,9 @@ public Id removePropertyKey(Id id) { for (VertexLabel vertexLabel : vertexLabels) { if (vertexLabel.properties().contains(id)) { throw new NotAllowException( - "Not allowed to remove property key: '%s' " + - "because the vertex label '%s' is still using it.", - propertyKey, vertexLabel.name()); + "Not allowed to remove property key: '%s' " + + "because the vertex label '%s' is still using it.", + propertyKey, vertexLabel.name()); } } @@ -170,9 +170,9 @@ public Id removePropertyKey(Id id) { for (EdgeLabel edgeLabel : edgeLabels) { if (edgeLabel.properties().contains(id)) { throw new NotAllowException( - "Not allowed to remove property key: '%s' " + - "because the edge label '%s' is still using it.", - propertyKey, edgeLabel.name()); + "Not allowed to remove property key: '%s' " + + "because the edge label '%s' is still using it.", + propertyKey, edgeLabel.name()); } } @@ -469,10 +469,11 @@ protected T getSchema(HugeType type, Id id) { /** * Currently doesn't allow to exist schema with the same name + * * @param type the query schema type * @param name the query schema name * @param SubClass of SchemaElement - * @return the queried schema object + * @return the queried schema object */ protected T getSchema(HugeType type, String name) { @@ -558,7 +559,7 @@ private BackendEntry serialize(SchemaElement schema) { return this.serializer.writeIndexLabel((IndexLabel) schema); default: throw new AssertionError(String.format( - "Unknown schema type '%s'", schema.type())); + "Unknown schema type '%s'", schema.type())); } } @@ -575,7 +576,7 @@ private T deserialize(BackendEntry entry, HugeType type) { return (T) this.serializer.readIndexLabel(this.graph(), entry); default: throw new AssertionError(String.format( - "Unknown schema type '%s'", type)); + "Unknown schema type '%s'", type)); } } @@ -619,7 +620,7 @@ private void checkIdAndUpdateNextId(HugeType type, Id id, return; } throw new 
IllegalStateException(String.format( - "Invalid system id '%s'", id)); + "Invalid system id '%s'", id)); } E.checkState(id.number() && id.asLong() > 0L, "Schema id must be number and >0, but got '%s'", id); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/config/CoreOptions.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/config/CoreOptions.java index 387bab9481..9b9bc5ca32 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/config/CoreOptions.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/config/CoreOptions.java @@ -146,28 +146,28 @@ public static synchronized CoreOptions instance() { ); public static final ConfigOption RAFT_SNAPSHOT_PARALLEL_COMPRESS = - new ConfigOption<>( - "raft.snapshot_parallel_compress", - "Whether to enable parallel compress.", - disallowEmpty(), - false - ); + new ConfigOption<>( + "raft.snapshot_parallel_compress", + "Whether to enable parallel compress.", + disallowEmpty(), + false + ); public static final ConfigOption RAFT_SNAPSHOT_COMPRESS_THREADS = - new ConfigOption<>( - "raft.snapshot_compress_threads", - "The thread number used to do snapshot compress.", - rangeInt(0, Integer.MAX_VALUE), - 4 - ); + new ConfigOption<>( + "raft.snapshot_compress_threads", + "The thread number used to do snapshot compress.", + rangeInt(0, Integer.MAX_VALUE), + 4 + ); public static final ConfigOption RAFT_SNAPSHOT_DECOMPRESS_THREADS = - new ConfigOption<>( - "raft.snapshot_decompress_threads", - "The thread number used to do snapshot decompress.", - rangeInt(0, Integer.MAX_VALUE), - 4 - ); + new ConfigOption<>( + "raft.snapshot_decompress_threads", + "The thread number used to do snapshot decompress.", + rangeInt(0, Integer.MAX_VALUE), + 4 + ); public static final ConfigOption RAFT_BACKEND_THREADS = new ConfigOption<>( @@ -422,7 +422,7 @@ public static synchronized CoreOptions instance() { new ConfigOption<>( "edge.tx_capacity", "The max size(items) of edges(uncommitted) in " + - "transaction.", + "transaction.", rangeInt(COMMIT_BATCH, 1000000), 10000 ); @@ -605,7 +605,7 @@ public static synchronized CoreOptions instance() { "Specify the mode for the text analyzer, " + "the available mode of analyzer are " + "ansj: [BaseAnalysis, IndexAnalysis, ToAnalysis, " + - "NlpAnalysis], " + + "NlpAnalysis], " + "hanlp: [standard, nlp, index, nShort, shortest, speed], " + "smartcn: [], " + "jieba: [SEARCH, INDEX], " + diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphIoRegistry.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphIoRegistry.java index b9154e56cd..71733fa588 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphIoRegistry.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphIoRegistry.java @@ -23,7 +23,7 @@ public class HugeGraphIoRegistry extends AbstractIoRegistry { private static final HugeGraphIoRegistry INSTANCE = - new HugeGraphIoRegistry(); + new HugeGraphIoRegistry(); public static HugeGraphIoRegistry instance() { return INSTANCE; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphSONModule.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphSONModule.java index c641a26687..416c3b0e4c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphSONModule.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGraphSONModule.java @@ -29,6 +29,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; +import org.apache.hugegraph.HugeException; import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; @@ -40,6 +41,10 @@ import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.structure.HugeProperty; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.util.Blob; import org.apache.tinkerpop.gremlin.process.traversal.Path; @@ -64,12 +69,6 @@ import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; import org.apache.tinkerpop.shaded.jackson.databind.ser.std.UUIDSerializer; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.structure.HugeEdge; -import org.apache.hugegraph.structure.HugeElement; -import org.apache.hugegraph.structure.HugeProperty; -import org.apache.hugegraph.structure.HugeVertex; - @SuppressWarnings("serial") public class HugeGraphSONModule extends TinkerPopJacksonModule { @@ -83,7 +82,7 @@ public class HugeGraphSONModule extends TinkerPopJacksonModule { private static final Map TYPE_DEFINITIONS; private static final GraphSONSchemaSerializer SCHEMA_SERIALIZER = - new GraphSONSchemaSerializer(); + new GraphSONSchemaSerializer(); // NOTE: jackson will synchronize DateFormat private static final String DF = "yyyy-MM-dd HH:mm:ss.SSS"; @@ -158,7 +157,7 @@ public static void registerCommonSerializers(SimpleModule module) { module.addSerializer(Date.class, new DateSerializer(useTimestamp, DATE_FORMAT)); module.addDeserializer(Date.class, new DateDeserializer( - new DateDeserializer(), DATE_FORMAT, DF)); + new DateDeserializer(), DATE_FORMAT, DF)); module.addSerializer(UUID.class, new UUIDSerializer()); module.addDeserializer(UUID.class, new UUIDDeserializer()); @@ -220,7 +219,7 @@ public OptionalSerializer() { public void serialize(Optional optional, JsonGenerator jsonGenerator, SerializerProvider provider) - throws IOException { + throws IOException { if (optional.isPresent()) { jsonGenerator.writeObject(optional.get()); } else { @@ -239,7 +238,7 @@ public IdSerializer(Class clazz) { public void serialize(T value, JsonGenerator jsonGenerator, SerializerProvider provider) - throws IOException { + throws IOException { if (value.number()) { jsonGenerator.writeNumber(value.asLong()); } else { @@ -252,7 +251,7 @@ public void serializeWithType(T value, JsonGenerator jsonGenerator, SerializerProvider provider, TypeSerializer typeSer) - throws IOException { + throws IOException { // https://github.com/FasterXML/jackson-databind/issues/2320 WritableTypeId typeId = typeSer.typeId(value, JsonToken.VALUE_STRING); typeSer.writeTypePrefix(jsonGenerator, typeId); @@ -262,7 +261,7 @@ public void serializeWithType(T value, } private static class IdDeserializer - extends StdDeserializer { + extends StdDeserializer { public IdDeserializer(Class clazz) { super(clazz); @@ -272,7 +271,7 @@ public IdDeserializer(Class clazz) { @Override public T deserialize(JsonParser jsonParser, DeserializationContext ctxt) - throws IOException { + throws IOException { Class clazz = this.handledType(); if 
(clazz.equals(LongId.class)) { Number idValue = ctxt.readValue(jsonParser, Number.class); @@ -292,7 +291,7 @@ public T deserialize(JsonParser jsonParser, } private static class PropertyKeySerializer - extends StdSerializer { + extends StdSerializer { public PropertyKeySerializer() { super(PropertyKey.class); @@ -302,13 +301,13 @@ public PropertyKeySerializer() { public void serialize(PropertyKey pk, JsonGenerator jsonGenerator, SerializerProvider provider) - throws IOException { + throws IOException { writeEntry(jsonGenerator, SCHEMA_SERIALIZER.writePropertyKey(pk)); } } private static class VertexLabelSerializer - extends StdSerializer { + extends StdSerializer { public VertexLabelSerializer() { super(VertexLabel.class); @@ -318,7 +317,7 @@ public VertexLabelSerializer() { public void serialize(VertexLabel vl, JsonGenerator jsonGenerator, SerializerProvider provider) - throws IOException { + throws IOException { writeEntry(jsonGenerator, SCHEMA_SERIALIZER.writeVertexLabel(vl)); } } @@ -333,13 +332,13 @@ public EdgeLabelSerializer() { public void serialize(EdgeLabel el, JsonGenerator jsonGenerator, SerializerProvider provider) - throws IOException { + throws IOException { writeEntry(jsonGenerator, SCHEMA_SERIALIZER.writeEdgeLabel(el)); } } private static class IndexLabelSerializer - extends StdSerializer { + extends StdSerializer { public IndexLabelSerializer() { super(IndexLabel.class); @@ -349,14 +348,14 @@ public IndexLabelSerializer() { public void serialize(IndexLabel il, JsonGenerator jsonGenerator, SerializerProvider provider) - throws IOException { + throws IOException { writeEntry(jsonGenerator, SCHEMA_SERIALIZER.writeIndexLabel(il)); } } private static void writeEntry(JsonGenerator jsonGenerator, Map schema) - throws IOException { + throws IOException { jsonGenerator.writeStartObject(); for (Map.Entry entry : schema.entrySet()) { jsonGenerator.writeFieldName(entry.getKey().string()); @@ -366,7 +365,7 @@ private static void writeEntry(JsonGenerator jsonGenerator, } protected abstract static class HugeElementSerializer - extends StdSerializer { + extends StdSerializer { public HugeElementSerializer(Class clazz) { super(clazz); @@ -374,7 +373,7 @@ public HugeElementSerializer(Class clazz) { public void writeIdField(String fieldName, Id id, JsonGenerator generator) - throws IOException { + throws IOException { generator.writeFieldName(fieldName); if (id.number()) { generator.writeNumber(id.asLong()); @@ -386,7 +385,7 @@ public void writeIdField(String fieldName, Id id, public void writePropertiesField(Collection> properties, JsonGenerator generator, SerializerProvider provider) - throws IOException { + throws IOException { // Start write properties generator.writeFieldName("properties"); generator.writeStartObject(); @@ -405,8 +404,8 @@ public void writePropertiesField(Collection> properties, } } catch (IOException e) { throw new HugeException( - "Failed to serialize property(%s: %s) " + - "for vertex '%s'", key, val, property.element()); + "Failed to serialize property(%s: %s) " + + "for vertex '%s'", key, val, property.element()); } } // End write properties @@ -415,7 +414,7 @@ public void writePropertiesField(Collection> properties, } private static class HugeVertexSerializer - extends HugeElementSerializer { + extends HugeElementSerializer { public HugeVertexSerializer() { super(HugeVertex.class); @@ -424,7 +423,7 @@ public HugeVertexSerializer() { @Override public void serialize(HugeVertex vertex, JsonGenerator generator, SerializerProvider provider) - throws IOException { + 
throws IOException { vertex.forceLoad(); generator.writeStartObject(); @@ -442,7 +441,7 @@ public void serialize(HugeVertex vertex, JsonGenerator generator, public void serializeWithType(HugeVertex value, JsonGenerator generator, SerializerProvider provider, TypeSerializer typeSer) - throws IOException { + throws IOException { WritableTypeId typeId = typeSer.typeId(value, JsonToken.VALUE_STRING); typeSer.writeTypePrefix(generator, typeId); this.serialize(value, generator, provider); @@ -451,7 +450,7 @@ public void serializeWithType(HugeVertex value, JsonGenerator generator, } private static class HugeEdgeSerializer - extends HugeElementSerializer { + extends HugeElementSerializer { public HugeEdgeSerializer() { super(HugeEdge.class); @@ -460,7 +459,7 @@ public HugeEdgeSerializer() { @Override public void serialize(HugeEdge edge, JsonGenerator generator, SerializerProvider provider) - throws IOException { + throws IOException { edge.forceLoad(); generator.writeStartObject(); @@ -486,7 +485,7 @@ public void serialize(HugeEdge edge, JsonGenerator generator, public void serializeWithType(HugeEdge value, JsonGenerator generator, SerializerProvider provider, TypeSerializer typeSer) - throws IOException { + throws IOException { WritableTypeId typeId = typeSer.typeId(value, JsonToken.VALUE_STRING); typeSer.writeTypePrefix(generator, typeId); this.serialize(value, generator, provider); @@ -591,7 +590,7 @@ public BlobDeserializer() { @Override public Blob deserialize(JsonParser jsonParser, DeserializationContext ctxt) - throws IOException { + throws IOException { byte[] bytes = jsonParser.getBinaryValue(); return Blob.wrap(bytes); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGryoModule.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGryoModule.java index 334eba96f8..0feee228f6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGryoModule.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/io/HugeGryoModule.java @@ -40,7 +40,7 @@ public class HugeGryoModule { private static GraphSONSchemaSerializer schemaSerializer = - new GraphSONSchemaSerializer(); + new GraphSONSchemaSerializer(); public static void register(HugeGraphIoRegistry io) { io.register(GryoIo.class, Optional.class, new OptionalSerializer()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/ComputerJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/ComputerJob.java index 6665304b69..5e1f93e145 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/ComputerJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/ComputerJob.java @@ -22,8 +22,8 @@ import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.job.computer.Computer; import org.apache.hugegraph.job.computer.ComputerPool; -import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.JsonUtil; public class ComputerJob extends SysJob { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/EphemeralJobBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/EphemeralJobBuilder.java index 81e9e3d24d..a8a0966fa7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/EphemeralJobBuilder.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/EphemeralJobBuilder.java @@ -17,11 +17,11 @@ package org.apache.hugegraph.job; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.task.HugeTask; import org.apache.hugegraph.task.TaskScheduler; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.util.E; public class EphemeralJobBuilder { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/GremlinJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/GremlinJob.java index 491fe29422..8bde61feb4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/GremlinJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/GremlinJob.java @@ -23,10 +23,10 @@ import java.util.Map; import org.apache.hugegraph.backend.query.Query; -import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.exception.LimitExceedException; import org.apache.hugegraph.traversal.optimize.HugeScriptTraversal; import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.JsonUtil; public class GremlinJob extends UserJob { @@ -71,9 +71,9 @@ public Object execute() throws Exception { bindings.put(TASK_BIND_NAME, new GremlinJobProxy()); HugeScriptTraversal traversal = new HugeScriptTraversal<>( - this.graph().traversal(), - language, gremlin, - bindings, aliases); + this.graph().traversal(), + language, gremlin, + bindings, aliases); List results = new ArrayList<>(); long capacity = Query.defaultCapacity(Query.NO_CAPACITY); try { @@ -105,8 +105,8 @@ private void checkResultsSize(Object results) { } if (size > TASK_RESULTS_MAX_SIZE) { throw new LimitExceedException( - "Job results size %s has exceeded the max limit %s", - size, TASK_RESULTS_MAX_SIZE); + "Job results size %s has exceeded the max limit %s", + size, TASK_RESULTS_MAX_SIZE); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/JobBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/JobBuilder.java index 11868a5601..fc7067b419 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/JobBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/JobBuilder.java @@ -19,12 +19,12 @@ import java.util.Set; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.task.HugeTask; import org.apache.hugegraph.task.TaskCallable; import org.apache.hugegraph.task.TaskScheduler; import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.util.E; public class JobBuilder { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AbstractAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AbstractAlgorithm.java index 51cde31f65..0e249ebe25 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AbstractAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AbstractAlgorithm.java @@ -29,18 +29,10 @@ import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.mutable.MutableLong; +import org.apache.hugegraph.HugeException; import org.apache.hugegraph.backend.id.Id; import 
org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Element; -import org.apache.tinkerpop.gremlin.structure.Property; -import org.apache.tinkerpop.gremlin.structure.Transaction; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - -import org.apache.hugegraph.HugeException; import org.apache.hugegraph.iterator.FilterIterator; import org.apache.hugegraph.iterator.FlatMapperIterator; import org.apache.hugegraph.job.UserJob; @@ -55,6 +47,14 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.ParameterUtil; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Element; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Transaction; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; + import com.google.common.base.Objects; @SuppressWarnings("deprecation") // StringEscapeUtils @@ -87,7 +87,7 @@ public abstract class AbstractAlgorithm implements Algorithm { public static final String KEY_TIMES = "times"; public static final String KEY_STABLE_TIMES = "stable_times"; public static final String KEY_PRECISION = "precision"; - public static final String KEY_SHOW_MOD= "show_modularity"; + public static final String KEY_SHOW_MOD = "show_modularity"; public static final String KEY_SHOW_COMM = "show_community"; public static final String KEY_EXPORT_COMM = "export_community"; public static final String KEY_SKIP_ISOLATED = "skip_isolated"; @@ -104,7 +104,7 @@ public abstract class AbstractAlgorithm implements Algorithm { public static final long DEFAULT_DEGREE = 100L; public static final long DEFAULT_SAMPLE = 1L; public static final long DEFAULT_TIMES = 20L; - public static final long DEFAULT_STABLE_TIMES= 3L; + public static final long DEFAULT_STABLE_TIMES = 3L; public static final double DEFAULT_PRECISION = 1.0 / 1000; public static final double DEFAULT_ALPHA = 0.5D; @@ -267,13 +267,13 @@ protected static Directions parseDirection(Object direction) { return Directions.IN; } else { throw new IllegalArgumentException(String.format( - "The value of direction must be in [OUT, IN, BOTH], " + - "but got '%s'", direction)); + "The value of direction must be in [OUT, IN, BOTH], " + + "but got '%s'", direction)); } } public static class AlgoTraverser extends HugeTraverser - implements AutoCloseable { + implements AutoCloseable { private final UserJob job; protected final ExecutorService executor; @@ -369,7 +369,7 @@ protected Iterator vertices(Object label, String key, if (value != null) { vertices = filter(vertices, key, value); } - return vertices; + return vertices; } protected Iterator vertices(Object label, long limit) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AlgorithmPool.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AlgorithmPool.java index 3644ab67be..0a73e2037d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AlgorithmPool.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/AlgorithmPool.java @@ -20,9 +20,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.apache.hugegraph.job.algorithm.path.RingsDetectAlgorithm; -import org.apache.hugegraph.job.algorithm.rank.PageRankAlgorithm; -import org.apache.hugegraph.job.algorithm.similarity.FusiformSimilarityAlgorithm; import org.apache.hugegraph.job.algorithm.cent.BetweennessCentralityAlgorithm; import org.apache.hugegraph.job.algorithm.cent.BetweennessCentralityAlgorithmV2; import org.apache.hugegraph.job.algorithm.cent.ClosenessCentralityAlgorithm; @@ -37,6 +34,9 @@ import org.apache.hugegraph.job.algorithm.comm.LpaAlgorithm; import org.apache.hugegraph.job.algorithm.comm.TriangleCountAlgorithm; import org.apache.hugegraph.job.algorithm.comm.WeakConnectedComponent; +import org.apache.hugegraph.job.algorithm.path.RingsDetectAlgorithm; +import org.apache.hugegraph.job.algorithm.rank.PageRankAlgorithm; +import org.apache.hugegraph.job.algorithm.similarity.FusiformSimilarityAlgorithm; import org.apache.hugegraph.util.E; public class AlgorithmPool { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/BfsTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/BfsTraverser.java index b3d12f9f9f..4a6cda246e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/BfsTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/BfsTraverser.java @@ -24,15 +24,14 @@ import java.util.Stack; import org.apache.hugegraph.backend.id.Id; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.type.define.Directions; +import org.apache.tinkerpop.gremlin.structure.Edge; public abstract class BfsTraverser - extends AbstractAlgorithm.AlgoTraverser - implements AutoCloseable { + extends AbstractAlgorithm.AlgoTraverser + implements AutoCloseable { private final Stack traversedVertices = new Stack<>(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/Consumers.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/Consumers.java index bc4ece38b3..dc04bdf605 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/Consumers.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/Consumers.java @@ -25,12 +25,11 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.task.TaskManager.ContextCallable; import org.apache.hugegraph.util.ExecutorUtil; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public class Consumers { @@ -147,7 +146,7 @@ public void provide(V v) throws Throwable { try { this.queue.put(v); } catch (InterruptedException e) { - LOG.warn("Interrupted", e);; + LOG.warn("Interrupted", e); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountEdgeAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountEdgeAlgorithm.java index 62252b7d4a..fd20f8a827 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountEdgeAlgorithm.java 
+++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountEdgeAlgorithm.java @@ -22,10 +22,9 @@ import java.util.Map; import org.apache.commons.lang3.mutable.MutableLong; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.util.JsonUtil; +import org.apache.tinkerpop.gremlin.structure.Edge; public class CountEdgeAlgorithm extends AbstractAlgorithm { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountVertexAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountVertexAlgorithm.java index 960f8fc8f4..c325370b3b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountVertexAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/CountVertexAlgorithm.java @@ -22,11 +22,10 @@ import java.util.Map; import org.apache.commons.lang3.mutable.MutableLong; +import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.util.JsonUtil; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.job.UserJob; - public class CountVertexAlgorithm extends AbstractAlgorithm { @Override diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/SubgraphStatAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/SubgraphStatAlgorithm.java index 2bf2d36871..d6a1be8aa2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/SubgraphStatAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/SubgraphStatAlgorithm.java @@ -21,15 +21,10 @@ import java.util.Map; import org.apache.commons.configuration2.PropertiesConfiguration; -import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.config.CoreOptions; -import org.apache.hugegraph.util.ParameterUtil; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.StandardHugeGraph; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.task.HugeTask; @@ -38,6 +33,11 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.Log; +import org.apache.hugegraph.util.ParameterUtil; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableMap; public class SubgraphStatAlgorithm extends AbstractAlgorithm { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/AbstractCentAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/AbstractCentAlgorithm.java index 4a76b77cb7..8ab6ca81f7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/AbstractCentAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/AbstractCentAlgorithm.java @@ -25,6 +25,13 @@ import 
org.apache.commons.lang3.tuple.Pair; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.iterator.MapperIterator; +import org.apache.hugegraph.job.UserJob; +import org.apache.hugegraph.job.algorithm.AbstractAlgorithm; +import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.Log; import org.apache.tinkerpop.gremlin.process.traversal.Order; import org.apache.tinkerpop.gremlin.process.traversal.Pop; import org.apache.tinkerpop.gremlin.process.traversal.Scope; @@ -35,14 +42,6 @@ import org.apache.tinkerpop.gremlin.structure.Vertex; import org.slf4j.Logger; -import org.apache.hugegraph.iterator.MapperIterator; -import org.apache.hugegraph.job.UserJob; -import org.apache.hugegraph.job.algorithm.AbstractAlgorithm; -import org.apache.hugegraph.structure.HugeElement; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.type.define.Directions; -import org.apache.hugegraph.util.Log; - public abstract class AbstractCentAlgorithm extends AbstractAlgorithm { private static final Logger LOG = Log.logger(AbstractCentAlgorithm.class); @@ -72,11 +71,11 @@ public Traverser(UserJob job) { } protected GraphTraversal constructSource( - String sourceLabel, - long sourceSample, - String sourceCLabel) { + String sourceLabel, + long sourceSample, + String sourceCLabel) { GraphTraversal t = this.graph().traversal() - .withSack(1f).V(); + .withSack(1f).V(); if (sourceLabel != null) { t = t.hasLabel(sourceLabel); @@ -95,9 +94,9 @@ protected GraphTraversal constructSource( } protected GraphTraversal constructPath( - GraphTraversal t, Directions dir, - String label, long degree, long sample, - String sourceLabel, String sourceCLabel) { + GraphTraversal t, Directions dir, + String label, long degree, long sample, + String sourceLabel, String sourceCLabel) { GraphTraversal unit = constructPathUnit(dir, label, degree, sample, sourceLabel, @@ -108,10 +107,10 @@ protected GraphTraversal constructPath( } protected GraphTraversal constructPathUnit( - Directions dir, String label, - long degree, long sample, - String sourceLabel, - String sourceCLabel) { + Directions dir, String label, + long degree, long sample, + String sourceLabel, + String sourceCLabel) { if (dir == null) { dir = Directions.BOTH; } @@ -139,10 +138,10 @@ protected GraphTraversal constructPathUnit( } protected GraphTraversal filterNonShortestPath( - GraphTraversal t, - boolean keepOneShortestPath) { + GraphTraversal t, + boolean keepOneShortestPath) { long size = this.graph().traversal().V().limit(100000L) - .count().next(); + .count().next(); Map, Integer> triples = new HashMap<>((int) size); return t.filter(it -> { Id start = it.path(Pop.first, "v").id(); @@ -168,15 +167,15 @@ protected GraphTraversal filterNonShortestPath( } protected GraphTraversal substractPath( - GraphTraversal t, - boolean withBoundary) { + GraphTraversal t, + boolean withBoundary) { // t.select(Pop.all, "v").unfold().id() return t.select(Pop.all, "v").flatMap(it -> { List path = (List) it.get(); if (withBoundary) { @SuppressWarnings("unchecked") Iterator items = (Iterator) - path.iterator(); + path.iterator(); return new MapperIterator<>(items, HugeVertex::id); } int len = path.size(); @@ -185,11 +184,11 @@ protected GraphTraversal substractPath( } LOG.debug("CentAlgorithm substract path: {}", path); - path.remove(path.size() -1); + path.remove(path.size() - 1); path.remove(0); @SuppressWarnings("unchecked") 
Iterator items = (Iterator) - path.iterator(); + path.iterator(); return new MapperIterator<>(items, HugeVertex::id); }); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithm.java index b63bab4043..512d14a158 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithm.java @@ -25,16 +25,15 @@ import org.apache.commons.lang3.mutable.MutableFloat; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.SplicingIdGenerator; +import org.apache.hugegraph.job.UserJob; +import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.type.define.Directions; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.Pop; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.job.UserJob; -import org.apache.hugegraph.structure.HugeElement; -import org.apache.hugegraph.type.define.Directions; - public class BetweennessCentralityAlgorithm extends AbstractCentAlgorithm { @Override @@ -97,7 +96,7 @@ public Object betweennessCentrality(Directions direction, } protected GraphTraversal groupPathByEndpoints( - GraphTraversal t) { + GraphTraversal t) { return t.map(it -> { // t.select(Pop.all, "v").unfold().id() List path = it.path(Pop.all, "v"); @@ -112,13 +111,13 @@ public Object betweennessCentrality(Directions direction, List path = (List) it; assert path.size() >= 2; String first = path.get(0).toString(); - String last = path.get(path.size() -1).toString(); + String last = path.get(path.size() - 1).toString(); return SplicingIdGenerator.concat(first, last); }).unfold(); } protected GraphTraversal computeBetweenness( - GraphTraversal t) { + GraphTraversal t) { return t.fold(new HashMap(), (results, it) -> { @SuppressWarnings("unchecked") Map.Entry> entry = (Map.Entry>) it; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithmV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithmV2.java index 63dd9210b6..d33dc54eb8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithmV2.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/BetweennessCentralityAlgorithmV2.java @@ -24,13 +24,12 @@ import org.apache.commons.lang3.mutable.MutableFloat; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Query; -import org.apache.tinkerpop.gremlin.structure.Vertex; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.job.algorithm.BfsTraverser; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.type.define.Directions; +import org.apache.tinkerpop.gremlin.structure.Vertex; public class BetweennessCentralityAlgorithmV2 extends AbstractCentAlgorithm { @@ -129,7 +128,7 @@ 
protected void backtrack(Id startVertex, Id currentVertex, return; } MutableFloat betweenness = this.globalBetweennesses.get( - currentVertex); + currentVertex); if (betweenness == null) { betweenness = new MutableFloat(0.0F); this.globalBetweennesses.put(currentVertex, betweenness); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithm.java index cec4406154..db7eff5a48 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithm.java @@ -19,6 +19,8 @@ import java.util.Map; +import org.apache.hugegraph.job.UserJob; +import org.apache.hugegraph.type.define.Directions; import org.apache.tinkerpop.gremlin.process.traversal.Operator; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.Pop; @@ -27,9 +29,6 @@ import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.job.UserJob; -import org.apache.hugegraph.type.define.Directions; - public class ClosenessCentralityAlgorithm extends AbstractCentAlgorithm { public static final long DEFAULT_DEGREE = 100L; @@ -96,8 +95,8 @@ public Object closenessCentrality(Directions direction, */ GraphTraversal tg; tg = t.group().by(__.select(Pop.first, "v").id()) - .by(__.select(Pop.all, "v").count(Scope.local) - .math("_-1").sack(Operator.div).sack().sum()); + .by(__.select(Pop.all, "v").count(Scope.local) + .math("_-1").sack(Operator.div).sack().sum()); GraphTraversal tLimit = topN(tg, topN); return this.execute(tLimit, tLimit::next); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithmV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithmV2.java index 3879f29dda..e95eae587c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithmV2.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/ClosenessCentralityAlgorithmV2.java @@ -16,6 +16,10 @@ */ package org.apache.hugegraph.job.algorithm.cent; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.exception.NotSupportException; @@ -26,10 +30,6 @@ import org.apache.hugegraph.type.define.Directions; import org.apache.tinkerpop.gremlin.structure.Vertex; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - public class ClosenessCentralityAlgorithmV2 extends AbstractCentAlgorithm { @Override diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/DegreeCentralityAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/DegreeCentralityAlgorithm.java index 4c44d2632b..5a995d6dd7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/DegreeCentralityAlgorithm.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/DegreeCentralityAlgorithm.java @@ -23,13 +23,12 @@ import java.util.Map; import org.apache.hugegraph.backend.id.Id; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; public class DegreeCentralityAlgorithm extends AbstractCentAlgorithm { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/EigenvectorCentralityAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/EigenvectorCentralityAlgorithm.java index 13e4a6dc90..ef45587d41 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/EigenvectorCentralityAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/EigenvectorCentralityAlgorithm.java @@ -19,14 +19,13 @@ import java.util.Map; +import org.apache.hugegraph.job.UserJob; +import org.apache.hugegraph.type.define.Directions; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.job.UserJob; -import org.apache.hugegraph.type.define.Directions; - public class EigenvectorCentralityAlgorithm extends AbstractCentAlgorithm { public static final long DEFAULT_DEGREE = 100L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithm.java index 30890987b4..8371f73807 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithm.java @@ -19,15 +19,14 @@ import java.util.Map; +import org.apache.hugegraph.job.UserJob; +import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.ParameterUtil; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.job.UserJob; -import org.apache.hugegraph.type.define.Directions; -import org.apache.hugegraph.util.ParameterUtil; - public class StressCentralityAlgorithm extends AbstractCentAlgorithm { public static final String KEY_WITH_BOUNDARY = "with_boundary"; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithmV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithmV2.java index 314c2b837d..5817e03867 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithmV2.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/cent/StressCentralityAlgorithmV2.java @@ -24,13 +24,12 @@ import org.apache.commons.lang3.mutable.MutableLong; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Query; -import org.apache.tinkerpop.gremlin.structure.Vertex; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.job.algorithm.BfsTraverser; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.type.define.Directions; +import org.apache.tinkerpop.gremlin.structure.Vertex; public class StressCentralityAlgorithmV2 extends AbstractCentAlgorithm { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/ClusterCoefficientAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/ClusterCoefficientAlgorithm.java index 6399b86e66..e0f0ca5a6f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/ClusterCoefficientAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/ClusterCoefficientAlgorithm.java @@ -73,7 +73,7 @@ public Object clusterCoefficient(Directions direction, long degree) { assert triangles <= triads; double coefficient = triads == 0L ? 0d : 1d * triangles / triads; - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) Map converted = (Map) results; converted.put("cluster_coefficient", coefficient); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/KCoreAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/KCoreAlgorithm.java index 58a63986c9..e51aa27ac0 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/KCoreAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/KCoreAlgorithm.java @@ -26,11 +26,8 @@ import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.mutable.MutableInt; -import org.apache.hugegraph.backend.id.Id; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; - import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.traversal.algorithm.FusiformSimilarityTraverser; import org.apache.hugegraph.type.define.Directions; @@ -38,6 +35,9 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.ParameterUtil; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; + import com.google.common.collect.ImmutableSet; public class KCoreAlgorithm extends AbstractCommAlgorithm { @@ -198,7 +198,7 @@ private static Set extractKcore(SimilarsMap similarsMap, int k) { .iterator().next(); Id source = entry.getKey(); Set similars = new HashSet<>(); - for (Similar similar: entry.getValue()) { + for (Similar similar : entry.getValue()) { similars.add(new KcoreSimilar(similar)); } @@ -236,7 +236,8 @@ private static Set extractKcore(SimilarsMap similarsMap, int k) { } Set survivedIds = new HashSet<>(CollectionUtils - .subtract(similar.ids(), failedIds)); + .subtract(similar.ids(), + 
failedIds)); if (survivedIds.size() < k) { for (Id id : survivedIds) { counts.get(id).decrement(); @@ -247,7 +248,7 @@ private static Set extractKcore(SimilarsMap similarsMap, int k) { } } similars = new HashSet<>(CollectionUtils.subtract( - similars, failedSimilars)); + similars, failedSimilars)); } while (!stop); if (similars.isEmpty()) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainAlgorithm.java index 5b5a8ca5f8..17bfd22ad2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainAlgorithm.java @@ -62,8 +62,8 @@ public Object call(UserJob job, Map parameters) { Long exportPass = exportCommunity(parameters); try (LouvainTraverser traverser = new LouvainTraverser( - job, workers, degree, - label, clabel, skipIsolated)) { + job, workers, degree, + label, clabel, skipIsolated)) { if (clearPass != null) { return traverser.clearPass(clearPass.intValue()); } else if (modPass != null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainTraverser.java index 8b8158b16e..6b125d6895 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LouvainTraverser.java @@ -40,28 +40,28 @@ import org.apache.commons.lang3.tuple.Pair; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.hugegraph.job.algorithm.Consumers; -import org.apache.hugegraph.schema.SchemaLabel; -import org.apache.hugegraph.schema.SchemaManager; -import org.apache.hugegraph.schema.VertexLabel; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.T; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.slf4j.Logger; - import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.iterator.ListIterator; import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.job.algorithm.AbstractAlgorithm; import org.apache.hugegraph.job.algorithm.AbstractAlgorithm.AlgoTraverser; +import org.apache.hugegraph.job.algorithm.Consumers; +import org.apache.hugegraph.schema.SchemaLabel; +import org.apache.hugegraph.schema.SchemaManager; +import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.Log; import org.apache.hugegraph.util.StringEncoding; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import 
org.slf4j.Logger; + import com.google.common.collect.ImmutableMap; public class LouvainTraverser extends AlgoTraverser { @@ -107,7 +107,7 @@ private void defineSchemaOfPk() { if (this.graph().existsVertexLabel(label) || this.graph().existsEdgeLabel(label)) { throw new IllegalArgumentException( - "Please clear historical results before proceeding"); + "Please clear historical results before proceeding"); } SchemaManager schema = this.graph().schema(); @@ -136,7 +136,7 @@ private void defineSchemaOfPassN(int pass) { .create(); } catch (ExistedException e) { throw new IllegalArgumentException( - "Please clear historical results before proceeding", e); + "Please clear historical results before proceeding", e); } } @@ -452,11 +452,11 @@ private void mergeCommunities(int pass) { LOG.info("Merge community for pass {}", pass); // merge each community as a vertex Collection>> comms = this.cache.communities(); - assert this.skipIsolated || this.allMembersExist(comms, pass - 1); + assert this.skipIsolated || this.allMembersExist(comms, pass - 1); this.cache.resetVertexWeight(); Consumers>> consumers = new Consumers<>( - this.executor, pair -> { + this.executor, pair -> { // called by multi-threads this.mergeCommunity(pass, pair.getLeft(), pair.getRight()); }, () -> { @@ -551,10 +551,10 @@ private boolean allMembersExist(int pass) { tryNext(this.g.V().hasLabel(label).count()).longValue(); } else { expected = tryNext(this.g.V().hasLabel(labelOfPassN(lastPass)) - .values(C_WEIGHT).sum()); + .values(C_WEIGHT).sum()); } Number actual = tryNext(this.g.V().hasLabel(label) - .values(C_WEIGHT).sum()); + .values(C_WEIGHT).sum()); boolean allExist = actual.floatValue() == expected.floatValue(); assert allExist : actual + "!=" + expected; return allExist; @@ -821,7 +821,7 @@ public boolean equals(Object object) { @Override public String toString() { return String.format("[%s](size=%s weight=%s kin=%s kout=%s)", - this.cid , this.size, this.weight, + this.cid, this.size, this.weight, this.kin, this.kout); } } @@ -900,7 +900,7 @@ public Id genId2(int pass, Id cid) { return IdGenerator.of(id); } - public Collection>> communities(){ + public Collection>> communities() { // TODO: get communities from backend store instead of ram Map>> comms = new HashMap<>(); for (Entry e : this.vertex2Community.entrySet()) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LpaAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LpaAlgorithm.java index 70d677a0f6..6e92921284 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LpaAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/LpaAlgorithm.java @@ -27,15 +27,15 @@ import org.apache.commons.lang3.mutable.MutableInt; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.schema.SchemaManager; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.process.traversal.Scope; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.job.UserJob; -import org.apache.hugegraph.type.define.Directions; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableMap; public class LpaAlgorithm extends 
AbstractCommAlgorithm { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/WeakConnectedComponent.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/WeakConnectedComponent.java index d26fde6e16..674ec50235 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/WeakConnectedComponent.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/WeakConnectedComponent.java @@ -24,17 +24,17 @@ import java.util.Map; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.schema.VertexLabel; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.slf4j.Logger; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.schema.SchemaManager; +import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableMap; public class WeakConnectedComponent extends AbstractCommAlgorithm { @@ -101,16 +101,16 @@ public Object connectedComponent(int maxTimes, } } else { changeCount += this.findAndSetMinComponent( - currentSourceVertexId, - adjacentVertices); + currentSourceVertexId, + adjacentVertices); adjacentVertices = new ArrayList<>(); currentSourceVertexId = sourceVertexId; adjacentVertices.add(targetVertexId); } } changeCount += this.findAndSetMinComponent( - currentSourceVertexId, - adjacentVertices); + currentSourceVertexId, + adjacentVertices); LOG.debug("iterationTimes:{}, changeCount:{}", times, changeCount); @@ -145,7 +145,8 @@ private void initVertexComponentMap() { /** * process for a vertex and its adjacentVertices - * @param sourceVertexId the source vertex + * + * @param sourceVertexId the source vertex * @param adjacentVertices the adjacent vertices attached to source * vertex * @return the count of vertex that changed Component diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/rank/PageRankAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/rank/PageRankAlgorithm.java index f5d43bb132..5c11c37e1f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/rank/PageRankAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/rank/PageRankAlgorithm.java @@ -17,27 +17,26 @@ package org.apache.hugegraph.job.algorithm.rank; -import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.traversal.algorithm.HugeTraverser; - import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.slf4j.Logger; - +import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.job.algorithm.comm.AbstractCommAlgorithm; import org.apache.hugegraph.schema.SchemaManager; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.structure.HugeEdge; import 
org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableMap; public class PageRankAlgorithm extends AbstractCommAlgorithm { @@ -96,11 +95,11 @@ public Traverser(UserJob job) { * If topN > 0, then return topN elements with rank value in json. */ private Object pageRank(double alpha, - int maxTimes, - double precision, - long degree, - Directions direction, - long topN) { + int maxTimes, + double precision, + long degree, + Directions direction, + long topN) { this.initSchema(); int times; @@ -155,10 +154,10 @@ private Object pageRank(double alpha, if (topN > 0) { Object topNJson = this.getTopRank(topN); return ImmutableMap.of("alpha", alpha, - "iteration_times", times, - "last_changed_rank", changedRank, - "times", maxTimes, - "top", topNJson); + "iteration_times", times, + "last_changed_rank", changedRank, + "times", maxTimes, + "top", topNJson); } return ImmutableMap.of("alpha", alpha, "iteration_times", times, diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/similarity/FusiformSimilarityAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/similarity/FusiformSimilarityAlgorithm.java index 8462e1c95e..0d153091ee 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/similarity/FusiformSimilarityAlgorithm.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/similarity/FusiformSimilarityAlgorithm.java @@ -20,8 +20,6 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicLong; -import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.job.algorithm.AbstractAlgorithm; @@ -32,6 +30,7 @@ import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.ParameterUtil; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; public class FusiformSimilarityAlgorithm extends AbstractAlgorithm { @@ -164,7 +163,7 @@ public Object fusiformSimilars(String sourceLabel, String sourceCLabel, HugeGraph graph = this.graph(); FusiformSimilarityTraverser traverser = - new FusiformSimilarityTraverser(graph); + new FusiformSimilarityTraverser(graph); AtomicLong count = new AtomicLong(0L); JsonMap similarsJson = new JsonMap(); @@ -172,11 +171,11 @@ public Object fusiformSimilars(String sourceLabel, String sourceCLabel, this.traverse(sourceLabel, sourceCLabel, v -> { SimilarsMap similars = traverser.fusiformSimilarity( - IteratorUtils.of(v), direction, - label, minNeighbors, alpha, - minSimilars, (int) topSimilars, - groupProperty, minGroups, degree, - MAX_CAPACITY, NO_LIMIT, true); + IteratorUtils.of(v), direction, + label, minNeighbors, alpha, + minSimilars, (int) topSimilars, + groupProperty, minGroups, degree, + MAX_CAPACITY, NO_LIMIT, true); if (similars.isEmpty()) { return; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/AbstractComputer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/AbstractComputer.java index 73400b4620..5158ec7916 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/AbstractComputer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/AbstractComputer.java @@ -90,7 +90,7 @@ public Object call(Job job, Map parameters) { this.initializeConfig((ComputerJob) job); } catch (Exception e) { throw new HugeException( - "Failed to initialize computer config file", e); + "Failed to initialize computer config file", e); } // Set current computer job's specified parameters @@ -112,8 +112,8 @@ public Object call(Job job, Map parameters) { StringBuilder output = new StringBuilder(); try (LineNumberReader reader = new LineNumberReader( - new InputStreamReader( - process.getInputStream()))) { + new InputStreamReader( + process.getInputStream()))) { String line; while ((line = reader.readLine()) != null) { output.append(line).append("\n"); @@ -165,7 +165,7 @@ private Map readEnvConfig() { private Map readSubConfig(String sub) { List> nodes = - this.config.childConfigurationsAt(sub); + this.config.childConfigurationsAt(sub); E.checkArgument(nodes.size() >= 1, "'%s' must be contained in config '%s'", sub); @@ -176,9 +176,9 @@ private Map readSubConfig(String sub) { for (HierarchicalConfiguration node : nodes) { NodeModel nodeModel = node.getNodeModel(); E.checkArgument(nodeModel != null && - (nodeHandler = nodeModel.getNodeHandler()) != null && - (root = nodeHandler.getRootNode()) != null, - "Node '%s' must contain root", node); + (nodeHandler = nodeModel.getNodeHandler()) != null && + (root = nodeHandler.getRootNode()) != null, + "Node '%s' must contain root", node); results.put(root.getNodeName(), root.getValue()); } @@ -198,7 +198,7 @@ private String[] constructShellCommands(Map configs) { } protected abstract Map checkAndCollectParameters( - Map parameters); + Map parameters); protected static int maxSteps(Map parameters) { if (!parameters.containsKey(MAX_STEPS)) { @@ -259,8 +259,8 @@ protected static Directions parseDirection(Object direction) { return Directions.IN; } else { throw new IllegalArgumentException(String.format( - "The value of direction must be in [OUT, IN, BOTH], " + - "but got '%s'", direction)); + "The value of direction must be in [OUT, IN, BOTH], " + + "but got '%s'", direction)); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LouvainComputer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LouvainComputer.java index cbc2aa4a8a..f907205024 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LouvainComputer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LouvainComputer.java @@ -19,9 +19,10 @@ import java.util.Map; -import org.apache.hugegraph.util.ParameterUtil; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.ParameterUtil; + import com.google.common.collect.ImmutableMap; public class LouvainComputer extends AbstractComputer { @@ -64,7 +65,7 @@ public void checkParameters(Map parameters) { @Override protected Map checkAndCollectParameters( - Map parameters) { + Map parameters) { return ImmutableMap.of(TIMES, times(parameters), PRECISION, precision(parameters), DIRECTION, direction(parameters), diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LpaComputer.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LpaComputer.java index 61c6bb8dbf..89618ee7cf 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LpaComputer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/LpaComputer.java @@ -21,6 +21,7 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.ParameterUtil; + import com.google.common.collect.ImmutableMap; public class LpaComputer extends AbstractComputer { @@ -51,7 +52,7 @@ public void checkParameters(Map parameters) { @Override protected Map checkAndCollectParameters( - Map parameters) { + Map parameters) { return ImmutableMap.of(TIMES, times(parameters), PROPERTY, property(parameters), PRECISION, precision(parameters), diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/PageRankComputer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/PageRankComputer.java index 89fe1b67ea..51388c08c0 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/PageRankComputer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/PageRankComputer.java @@ -21,6 +21,7 @@ import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.ParameterUtil; + import com.google.common.collect.ImmutableMap; public class PageRankComputer extends AbstractComputer { @@ -49,7 +50,7 @@ public void checkParameters(Map parameters) { @Override protected Map checkAndCollectParameters( - Map parameters) { + Map parameters) { return ImmutableMap.of(MAX_STEPS, maxSteps(parameters), ALPHA, alpha(parameters), PRECISION, precision(parameters)); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/TriangleCountComputer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/TriangleCountComputer.java index e794f4ba76..73206f24ac 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/TriangleCountComputer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/TriangleCountComputer.java @@ -43,7 +43,7 @@ public void checkParameters(Map parameters) { @Override protected Map checkAndCollectParameters( - Map parameters) { + Map parameters) { return ImmutableMap.of(DIRECTION, direction(parameters), DEGREE, degree(parameters)); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/WeakConnectedComponentComputer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/WeakConnectedComponentComputer.java index 60c740f2e1..d84522b289 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/WeakConnectedComponentComputer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/computer/WeakConnectedComponentComputer.java @@ -43,7 +43,7 @@ public void checkParameters(Map parameters) { @Override protected Map checkAndCollectParameters( - Map parameters) { + Map parameters) { return ImmutableMap.of(MAX_STEPS, maxSteps(parameters), PRECISION, precision(parameters)); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java index 27b97f4745..f3f9599b0b 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java @@ -19,13 +19,14 @@ import java.util.Set; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.LockUtil; -import org.apache.hugegraph.HugeGraphParams; + import com.google.common.collect.ImmutableSet; public class EdgeLabelRemoveJob extends SchemaJob { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java index 4c69591079..2158c10c34 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java @@ -25,18 +25,18 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.define.SchemaStatus; -import org.apache.hugegraph.util.LockUtil; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; - import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.SchemaElement; import org.apache.hugegraph.schema.SchemaLabel; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.SchemaStatus; +import org.apache.hugegraph.util.LockUtil; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; + import com.google.common.collect.ImmutableSet; public class IndexLabelRebuildJob extends SchemaJob { @@ -78,7 +78,7 @@ private void rebuildIndex(SchemaElement schema) { default: assert schema.type() == HugeType.PROPERTY_KEY; throw new AssertionError(String.format( - "The %s can't rebuild index", schema.type())); + "The %s can't rebuild index", schema.type())); } } @@ -185,7 +185,7 @@ private SchemaElement schemaElement() { return this.graph().indexLabel(id); default: throw new AssertionError(String.format( - "Invalid HugeType '%s' for rebuild", type)); + "Invalid HugeType '%s' for rebuild", type)); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java index 28e7acd03f..418eb6116f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java @@ -17,13 +17,13 @@ package org.apache.hugegraph.job.schema; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; 
import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.LockUtil; -import org.apache.hugegraph.HugeGraphParams; public class IndexLabelRemoveJob extends SchemaJob { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java index a765ab150b..bb740a3c05 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java @@ -17,13 +17,13 @@ package org.apache.hugegraph.job.schema; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.LockUtil; -import org.apache.hugegraph.HugeGraphParams; public class OlapPropertyKeyClearJob extends IndexLabelRemoveJob { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java index 5b00c0a268..2f553635ac 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java @@ -26,10 +26,9 @@ import org.apache.hugegraph.job.SysJob; import org.apache.hugegraph.schema.SchemaElement; import org.apache.hugegraph.type.HugeType; -import org.slf4j.Logger; - import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public abstract class SchemaJob extends SysJob { @@ -82,21 +81,22 @@ public static String formatTaskName(HugeType type, Id id, String name) { /** * Use reflection to call SchemaTransaction.removeSchema(), * which is protected - * @param tx The remove operation actual executer - * @param schema the schema to be removed + * + * @param tx The remove operation actual executer + * @param schema the schema to be removed */ protected static void removeSchema(SchemaTransaction tx, SchemaElement schema) { try { Method method = SchemaTransaction.class - .getDeclaredMethod("removeSchema", - SchemaElement.class); + .getDeclaredMethod("removeSchema", + SchemaElement.class); method.setAccessible(true); method.invoke(tx, schema); } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { throw new AssertionError( - "Can't call SchemaTransaction.removeSchema()", e); + "Can't call SchemaTransaction.removeSchema()", e); } } @@ -104,21 +104,22 @@ protected static void removeSchema(SchemaTransaction tx, /** * Use reflection to call SchemaTransaction.updateSchema(), * which is protected - * @param tx The update operation actual execute - * @param schema the schema to be updated + * + * @param tx The update operation actual execute + * @param schema the schema to be updated */ protected static void updateSchema(SchemaTransaction tx, SchemaElement schema) { try { Method method = SchemaTransaction.class - .getDeclaredMethod("updateSchema", - SchemaElement.class); + .getDeclaredMethod("updateSchema", + SchemaElement.class); method.setAccessible(true); 
method.invoke(tx, schema); } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { throw new AssertionError( - "Can't call SchemaTransaction.updateSchema()", e); + "Can't call SchemaTransaction.updateSchema()", e); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java index 0a7660d1ba..f00f316ab9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java @@ -20,6 +20,8 @@ import java.util.List; import java.util.Set; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; @@ -27,8 +29,7 @@ import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.LockUtil; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraphParams; + import com.google.common.collect.ImmutableSet; public class VertexLabelRemoveJob extends SchemaJob { @@ -63,9 +64,9 @@ private static void removeVertexLabel(HugeGraphParams graph, Id id) { for (EdgeLabel edgeLabel : edgeLabels) { if (edgeLabel.linkWithLabel(id)) { throw new HugeException( - "Not allowed to remove vertex label '%s' " + - "because the edge label '%s' still link with it", - vertexLabel.name(), edgeLabel.name()); + "Not allowed to remove vertex label '%s' " + + "because the edge label '%s' still link with it", + vertexLabel.name(), edgeLabel.name()); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredElementJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredElementJob.java index 57dcc397e9..493bbdde59 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredElementJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredElementJob.java @@ -19,8 +19,8 @@ import java.util.Set; -import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.structure.HugeElement; import org.apache.hugegraph.util.E; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredIndexJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredIndexJob.java index c154716de1..5556febb89 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredIndexJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredIndexJob.java @@ -20,12 +20,12 @@ import java.util.Iterator; import java.util.Set; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.tx.GraphTransaction; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.structure.HugeElement; import org.apache.hugegraph.structure.HugeIndex; 
+import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; public class DeleteExpiredIndexJob extends DeleteExpiredJob { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredJob.java index ca5fed6112..9ffa9e955b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/DeleteExpiredJob.java @@ -17,18 +17,17 @@ package org.apache.hugegraph.job.system; -import org.apache.hugegraph.config.CoreOptions; -import org.apache.hugegraph.task.HugeTask; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.job.EphemeralJob; import org.apache.hugegraph.job.EphemeralJobBuilder; import org.apache.hugegraph.job.system.JobCounters.JobCounter; import org.apache.hugegraph.structure.HugeElement; import org.apache.hugegraph.structure.HugeIndex; +import org.apache.hugegraph.task.HugeTask; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; public abstract class DeleteExpiredJob extends EphemeralJob { @@ -77,7 +76,7 @@ public static void asyncDeleteExpiredObject(HugeGraph graph, V object) { } public static EphemeralJob newDeleteExpiredElementJob( - JobCounter jobCounter, V object) { + JobCounter jobCounter, V object) { if (object instanceof HugeElement) { return new DeleteExpiredElementJob<>(jobCounter.elements()); } else { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/JobCounters.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/JobCounters.java index d117dbe6ed..8e76664096 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/JobCounters.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/system/JobCounters.java @@ -21,8 +21,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.structure.HugeElement; import org.apache.hugegraph.structure.HugeIndex; @@ -90,6 +90,7 @@ public boolean addAndTriggerDelete(Object object) { /** * Try to add element in collection waiting to be deleted + * * @param element * @return true if we should create a new delete job, false otherwise */ @@ -103,6 +104,7 @@ public boolean addElementAndTriggerDelete(HugeElement element) { /** * Try to add edge in collection waiting to be deleted + * * @param index * @return true if we should create a new delete job, false otherwise */ diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/ClusterRole.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/ClusterRole.java index 1292693624..f85f29cc88 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/ClusterRole.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/ClusterRole.java @@ -87,9 +87,9 @@ public int hashCode() { @Override public String toString() { return "RoleStateData{" + - "node='" + node + '\'' + - ", clock=" + clock + - ", epoch=" + 
epoch + - '}'; + "node='" + node + '\'' + + ", clock=" + clock + + ", epoch=" + epoch + + '}'; } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/RoleElectionOptions.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/RoleElectionOptions.java index 9d04d1fc0f..748ec36bd0 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/RoleElectionOptions.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/RoleElectionOptions.java @@ -23,7 +23,7 @@ import org.apache.hugegraph.config.ConfigOption; import org.apache.hugegraph.config.OptionHolder; -public class RoleElectionOptions extends OptionHolder { +public class RoleElectionOptions extends OptionHolder { private RoleElectionOptions() { super(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardRoleListener.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardRoleListener.java index 716cf34e25..dbbea6d91e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardRoleListener.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardRoleListener.java @@ -33,7 +33,7 @@ public class StandardRoleListener implements RoleListener { private volatile boolean selfIsMaster; - public StandardRoleListener(TaskManager taskManager, + public StandardRoleListener(TaskManager taskManager, GlobalMasterInfo roleInfo) { this.taskManager = taskManager; this.taskManager.enableRoleElection(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphGremlinPlugin.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphGremlinPlugin.java index e21b3d7788..0b3b19ba7e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphGremlinPlugin.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphGremlinPlugin.java @@ -22,13 +22,13 @@ import java.util.Iterator; import java.util.Set; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeFactory; +import org.apache.hugegraph.util.ReflectionUtil; import org.apache.tinkerpop.gremlin.jsr223.AbstractGremlinPlugin; import org.apache.tinkerpop.gremlin.jsr223.DefaultImportCustomizer; import org.apache.tinkerpop.gremlin.jsr223.ImportCustomizer; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeFactory; -import org.apache.hugegraph.util.ReflectionUtil; import com.google.common.reflect.ClassPath; public class HugeGraphGremlinPlugin extends AbstractGremlinPlugin { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphPlugin.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphPlugin.java index d77c4816b6..7b7f87653c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphPlugin.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/plugin/HugeGraphPlugin.java @@ -17,9 +17,9 @@ package org.apache.hugegraph.plugin; -import org.apache.hugegraph.backend.store.BackendProviderFactory; import org.apache.hugegraph.analyzer.AnalyzerFactory; import org.apache.hugegraph.backend.serializer.SerializerFactory; +import org.apache.hugegraph.backend.store.BackendProviderFactory; 
import org.apache.hugegraph.config.OptionSpace; public interface HugeGraphPlugin { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java index db9d01ba0b..3aaf6e141d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java @@ -23,12 +23,13 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.builder.SchemaBuilder; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Frequency; import org.apache.hugegraph.util.E; + import com.google.common.base.Objects; public class EdgeLabel extends SchemaLabel { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java index e9c53e59f2..83bde16765 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java @@ -23,13 +23,14 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.IndexType; import org.apache.hugegraph.util.E; + import com.google.common.base.Objects; public class IndexLabel extends SchemaElement { @@ -90,10 +91,10 @@ public HugeType queryType() { return HugeType.SYS_SCHEMA; default: throw new AssertionError(String.format( - "Query type of index label is either '%s' or '%s', " + - "but '%s' is used", - HugeType.VERTEX_LABEL, HugeType.EDGE_LABEL, - this.baseType)); + "Query type of index label is either '%s' or '%s', " + + "but '%s' is used", + HugeType.VERTEX_LABEL, HugeType.EDGE_LABEL, + this.baseType)); } } @@ -180,7 +181,7 @@ public static IndexLabel label(HugeType type) { return ILN_IL; default: throw new AssertionError(String.format( - "No primitive index label for '%s'", type)); + "No primitive index label for '%s'", type)); } } @@ -202,7 +203,7 @@ public static IndexLabel label(HugeGraph graph, Id id) { return ILN_IL; default: throw new AssertionError(String.format( - "No primitive index label for '%s'", id)); + "No primitive index label for '%s'", id)); } } return graph.indexLabel(id); @@ -238,8 +239,8 @@ public static SchemaLabel getBaseLabel(HugeGraph graph, break; default: throw new AssertionError(String.format( - "Unsupported base type '%s' of index label", - baseType)); + "Unsupported base type '%s' of index label", + baseType)); } E.checkArgumentNotNull(label, "Can't find the %s with name '%s'", diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java index 842a0d1d2a..00b7968c21 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java @@ -25,9 +25,9 @@ import java.util.Map; import java.util.Set; -import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.exception.NotSupportException; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; @@ -142,7 +142,7 @@ public String clazz() { return String.format("List<%s>", dataType); default: throw new AssertionError(String.format( - "Unsupported cardinality: '%s'", this.cardinality)); + "Unsupported cardinality: '%s'", this.cardinality)); } } @@ -162,7 +162,7 @@ public Class implementClazz() { break; default: throw new AssertionError(String.format( - "Unsupported cardinality: '%s'", this.cardinality)); + "Unsupported cardinality: '%s'", this.cardinality)); } return cls; } @@ -189,10 +189,11 @@ public T newValue() { /** * Check property value valid + * * @param value the property value to be checked data type and cardinality - * @param the property value class + * @param the property value class * @return true if data type and cardinality satisfy requirements, - * otherwise false + * otherwise false */ public boolean checkValueType(V value) { boolean valid; @@ -211,17 +212,18 @@ public boolean checkValueType(V value) { break; default: throw new AssertionError(String.format( - "Unsupported cardinality: '%s'", this.cardinality)); + "Unsupported cardinality: '%s'", this.cardinality)); } return valid; } /** * Check type of the value valid + * * @param value the property value to be checked data type * @param the property value original data type * @return true if the value is or can convert to the data type, - * otherwise false + * otherwise false */ private boolean checkDataType(V value) { return this.dataType().clazz().isInstance(value); @@ -229,10 +231,11 @@ private boolean checkDataType(V value) { /** * Check type of all the values(maybe some list properties) valid + * * @param values the property values to be checked data type - * @param the property value class + * @param the property value class * @return true if all the values are or can convert to the data type, - * otherwise false + * otherwise false */ private boolean checkDataType(Collection values) { boolean valid = true; @@ -280,8 +283,8 @@ public V validValue(V value) { return this.convValue(value); } catch (RuntimeException e) { throw new IllegalArgumentException(String.format( - "Invalid property value '%s' for key '%s': %s", - value, this.name(), e.getMessage())); + "Invalid property value '%s' for key '%s': %s", + value, this.name(), e.getMessage())); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java index a6ae5b6c24..707029e043 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java @@ -20,16 +20,16 @@ import java.util.Collections; import java.util.Map; -import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.tinkerpop.gremlin.structure.Graph; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; +import 
org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.type.Nameable; import org.apache.hugegraph.type.Typeable; import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Graph; + import com.google.common.base.Objects; public abstract class SchemaElement implements Nameable, Typeable, @@ -158,7 +158,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return this.type().hashCode() ^ this.id.hashCode(); + return this.type().hashCode() ^ this.id.hashCode(); } @Override diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaLabel.java index 3a36be69de..e20a3f89dc 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaLabel.java @@ -22,18 +22,19 @@ import java.util.HashSet; import java.util.Set; -import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.Indexable; import org.apache.hugegraph.type.Propertiable; import org.apache.hugegraph.util.E; + import com.google.common.base.Objects; public abstract class SchemaLabel extends SchemaElement - implements Indexable, Propertiable { + implements Indexable, Propertiable { private final Set properties; private final Set nullableKeys; @@ -167,13 +168,13 @@ public static Id getLabelId(HugeGraph graph, HugeType type, Object label) { return graph.edgeLabel((String) label).id(); } else { throw new HugeException( - "Not support query from '%s' with label '%s'", - type, label); + "Not support query from '%s' with label '%s'", + type, label); } } else { throw new HugeException( - "The label type must be number or string, but got '%s'", - label.getClass()); + "The label type must be number or string, but got '%s'", + label.getClass()); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java index 16abf261aa..8e2260c6e5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java @@ -20,10 +20,8 @@ import java.util.List; import java.util.stream.Collectors; -import org.apache.hugegraph.backend.tx.SchemaTransaction; -import org.apache.tinkerpop.gremlin.structure.Graph; - import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.schema.builder.EdgeLabelBuilder; import org.apache.hugegraph.schema.builder.IndexLabelBuilder; @@ -31,6 +29,7 @@ import org.apache.hugegraph.schema.builder.VertexLabelBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Graph; public class SchemaManager { @@ -97,26 +96,26 @@ public IndexLabel getIndexLabel(String name) { public List getPropertyKeys() { return 
this.graph.propertyKeys().stream() - .filter(pk -> !Graph.Hidden.isHidden(pk.name())) - .collect(Collectors.toList()); + .filter(pk -> !Graph.Hidden.isHidden(pk.name())) + .collect(Collectors.toList()); } public List getVertexLabels() { return this.graph.vertexLabels().stream() - .filter(vl -> !Graph.Hidden.isHidden(vl.name())) - .collect(Collectors.toList()); + .filter(vl -> !Graph.Hidden.isHidden(vl.name())) + .collect(Collectors.toList()); } public List getEdgeLabels() { return this.graph.edgeLabels().stream() - .filter(el -> !Graph.Hidden.isHidden(el.name())) - .collect(Collectors.toList()); + .filter(el -> !Graph.Hidden.isHidden(el.name())) + .collect(Collectors.toList()); } public List getIndexLabels() { return this.graph.indexLabels().stream() - .filter(il -> !Graph.Hidden.isHidden(il.name())) - .collect(Collectors.toList()); + .filter(il -> !Graph.Hidden.isHidden(il.name())) + .collect(Collectors.toList()); } public void copyFrom(SchemaManager schema) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/Userdata.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/Userdata.java index a3d2f6356f..d485e558b8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/Userdata.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/Userdata.java @@ -47,8 +47,8 @@ public static void check(Userdata userdata, Action action) { for (Map.Entry e : userdata.entrySet()) { if (e.getValue() == null) { throw new NotAllowException( - "Not allowed to pass null userdata value " + - "when create or append schema"); + "Not allowed to pass null userdata value " + + "when create or append schema"); } } break; @@ -58,7 +58,7 @@ public static void check(Userdata userdata, Action action) { break; default: throw new AssertionError(String.format( - "Unknown schema action '%s'", action)); + "Unknown schema action '%s'", action)); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java index 1b00e8d5d8..73ca9253e6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java @@ -23,12 +23,13 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.IdStrategy; + import com.google.common.base.Objects; public class VertexLabel extends SchemaLabel { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java index 6137cae2e6..fb0bb331be 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java @@ -20,16 +20,16 @@ import java.util.Set; import java.util.function.Function; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import 
org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.tx.SchemaTransaction; +import org.apache.hugegraph.exception.ExistedException; +import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.SchemaElement; import org.apache.hugegraph.schema.VertexLabel; -import org.apache.hugegraph.HugeGraph; -import org.apache.hugegraph.exception.ExistedException; -import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.GraphMode; import org.apache.hugegraph.type.define.SchemaStatus; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java index c9f2ace0cd..606695ebfd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java @@ -26,27 +26,27 @@ import java.util.Set; import org.apache.commons.collections.CollectionUtils; - +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.tx.SchemaTransaction; -import org.apache.hugegraph.schema.PropertyKey; -import org.apache.hugegraph.schema.Userdata; -import org.apache.hugegraph.schema.VertexLabel; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.exception.NotAllowException; import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.Userdata; +import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Action; import org.apache.hugegraph.type.define.Frequency; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; + import com.google.common.collect.ImmutableList; public class EdgeLabelBuilder extends AbstractBuilder - implements EdgeLabel.Builder { + implements EdgeLabel.Builder { private Id id; private String name; @@ -114,7 +114,7 @@ public EdgeLabel build() { edgeLabel.ttl(this.ttl); if (this.ttlStartTime != null) { edgeLabel.ttlStartTime(this.graph().propertyKey( - this.ttlStartTime).id()); + this.ttlStartTime).id()); } edgeLabel.enableLabelIndex(this.enableLabelIndex == null || this.enableLabelIndex); @@ -170,6 +170,7 @@ public EdgeLabel create() { * Only sourceId, targetId, frequency, enableLabelIndex, properties, sortKeys, * nullableKeys are checked. * The id, ttl, ttlStartTime, userdata are not checked. 
+ * * @param existedEdgeLabel to be compared with * @return true if this has same properties with existedVertexLabel */ @@ -426,15 +427,15 @@ private void checkProperties(Action action) { case ELIMINATE: if (!this.properties.isEmpty()) { throw new NotAllowException( - "Not support to eliminate properties " + - "for edge label currently"); + "Not support to eliminate properties " + + "for edge label currently"); } break; case DELETE: break; default: throw new AssertionError(String.format( - "Unknown schema action '%s'", action)); + "Unknown schema action '%s'", action)); } } @@ -444,8 +445,8 @@ private void checkNullableKeys(Action action) { if (action == Action.ELIMINATE) { if (!this.nullableKeys.isEmpty()) { throw new NotAllowException( - "Not support to eliminate nullableKeys " + - "for edge label currently"); + "Not support to eliminate nullableKeys " + + "for edge label currently"); } return; } @@ -459,7 +460,7 @@ private void checkNullableKeys(Action action) { Set appendProps = this.properties; E.checkArgument(CollectionUtil.union(originProps, appendProps) - .containsAll(this.nullableKeys), + .containsAll(this.nullableKeys), "The nullableKeys: %s to be created or appended " + "must belong to the origin/new properties: %s/%s ", this.nullableKeys, originProps, appendProps); @@ -476,7 +477,7 @@ private void checkNullableKeys(Action action) { if (action == Action.APPEND) { Collection newAddedProps = CollectionUtils.subtract( - appendProps, originProps); + appendProps, originProps); E.checkArgument(this.nullableKeys.containsAll(newAddedProps), "The new added properties: %s must be nullable", newAddedProps); @@ -534,28 +535,28 @@ private void checkRelation() { private void checkStableVars() { if (this.sourceLabel != null) { throw new NotAllowException( - "Not allowed to update source label " + - "for edge label '%s', it must be null", this.name); + "Not allowed to update source label " + + "for edge label '%s', it must be null", this.name); } if (this.targetLabel != null) { throw new NotAllowException( - "Not allowed to update target label " + - "for edge label '%s', it must be null", this.name); + "Not allowed to update target label " + + "for edge label '%s', it must be null", this.name); } if (this.frequency != Frequency.DEFAULT) { throw new NotAllowException( - "Not allowed to update frequency " + - "for edge label '%s'", this.name); + "Not allowed to update frequency " + + "for edge label '%s'", this.name); } if (!this.sortKeys.isEmpty()) { throw new NotAllowException( - "Not allowed to update sort keys " + - "for edge label '%s'", this.name); + "Not allowed to update sort keys " + + "for edge label '%s'", this.name); } if (this.enableLabelIndex != null) { throw new NotAllowException( - "Not allowed to update enable_label_index " + - "for edge label '%s'", this.name); + "Not allowed to update enable_label_index " + + "for edge label '%s'", this.name); } } @@ -600,8 +601,8 @@ private void checkUserdata(Action action) { for (Map.Entry e : this.userdata.entrySet()) { if (e.getValue() == null) { throw new NotAllowException( - "Not allowed pass null userdata value when " + - "create or append edge label"); + "Not allowed pass null userdata value when " + + "create or append edge label"); } } break; @@ -611,7 +612,7 @@ private void checkUserdata(Action action) { break; default: throw new AssertionError(String.format( - "Unknown schema action '%s'", action)); + "Unknown schema action '%s'", action)); } } diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java index d1f6052c5d..b84559760b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java @@ -25,11 +25,11 @@ import java.util.concurrent.TimeoutException; import java.util.function.BiPredicate; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.tx.SchemaTransaction; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.exception.NotAllowException; @@ -52,7 +52,7 @@ import org.apache.hugegraph.util.collection.IdSet; public class IndexLabelBuilder extends AbstractBuilder - implements IndexLabel.Builder { + implements IndexLabel.Builder { private Id id; private String name; @@ -124,6 +124,7 @@ public IndexLabel build() { * Check whether this has same properties with existedIndexLabel. * Only baseType, baseValue, indexType, indexFields are checked. * The id, checkExist, userdata are not checked. + * * @param existedIndexLabel to be compared with * @return true if this has same properties with existedIndexLabel */ @@ -268,7 +269,7 @@ public IndexLabel create() { graph.taskScheduler().waitUntilTaskCompleted(task, timeout); } catch (TimeoutException e) { throw new HugeException( - "Failed to wait index-creating task completed", e); + "Failed to wait index-creating task completed", e); } // Return index label without task-info @@ -587,7 +588,7 @@ private void checkRepeatIndex(SchemaLabel schemaLabel) { break; default: throw new AssertionError(String.format( - "Unsupported index type: %s", this.indexType)); + "Unsupported index type: %s", this.indexType)); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java index ed00f874db..825ab4dd53 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java @@ -20,19 +20,19 @@ import java.util.Map; import java.util.concurrent.TimeoutException; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.tx.SchemaTransaction; -import org.apache.hugegraph.schema.PropertyKey; -import org.apache.hugegraph.schema.SchemaElement; -import org.apache.hugegraph.schema.Userdata; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.exception.NotAllowException; import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.exception.NotSupportException; +import org.apache.hugegraph.schema.PropertyKey; 
+import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.schema.Userdata; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Action; import org.apache.hugegraph.type.define.AggregateType; @@ -42,7 +42,7 @@ import org.apache.hugegraph.util.E; public class PropertyKeyBuilder extends AbstractBuilder - implements PropertyKey.Builder { + implements PropertyKey.Builder { private Id id; private String name; @@ -98,6 +98,7 @@ public PropertyKey build() { * Check whether this has same properties with propertyKey. * Only dataType, cardinality, aggregateType are checked. * The id, checkExist, userdata are not checked. + * * @param propertyKey to be compared with * @return true if this has same properties with propertyKey */ @@ -117,12 +118,9 @@ private boolean hasSameProperties(PropertyKey propertyKey) { return false; } - if (this.writeType != propertyKey.writeType()) { - return false; - } + return this.writeType == propertyKey.writeType(); // all properties are same, return true. - return true; } @Override @@ -156,7 +154,7 @@ public SchemaElement.TaskWithSchema createWithTask() { public PropertyKey create() { // Create index label async SchemaElement.TaskWithSchema propertyKeyWithTask = - this.createWithTask(); + this.createWithTask(); Id task = propertyKeyWithTask.task(); if (task == IdGenerator.ZERO) { @@ -174,7 +172,7 @@ public PropertyKey create() { graph.taskScheduler().waitUntilTaskCompleted(task, timeout); } catch (TimeoutException e) { throw new HugeException( - "Failed to wait property key create task completed", e); + "Failed to wait property key create task completed", e); } // Return property key without task-info @@ -424,17 +422,17 @@ private void checkAggregateType() { if (this.aggregateType.isSum() && this.dataType.isDate()) { throw new NotAllowException( - "Not allowed to set aggregate type '%s' for " + - "property key '%s' with data type '%s'", - this.aggregateType, this.name, this.dataType); + "Not allowed to set aggregate type '%s' for " + + "property key '%s' with data type '%s'", + this.aggregateType, this.name, this.dataType); } if (this.aggregateType.isNumber() && !this.dataType.isNumber() && !this.dataType.isDate()) { throw new NotAllowException( - "Not allowed to set aggregate type '%s' for " + - "property key '%s' with data type '%s'", - this.aggregateType, this.name, this.dataType); + "Not allowed to set aggregate type '%s' for " + + "property key '%s' with data type '%s'", + this.aggregateType, this.name, this.dataType); } } @@ -445,22 +443,22 @@ private void checkOlap() { if (!this.graph().backendStoreFeatures().supportsOlapProperties()) { throw new NotSupportException( - "olap property key '%s' for backend '%s'", - this.name, this.graph().backend()); + "olap property key '%s' for backend '%s'", + this.name, this.graph().backend()); } if (!this.aggregateType.isNone()) { throw new NotAllowException( - "Not allowed to set aggregate type '%s' for olap " + - "property key '%s'", this.aggregateType, this.name); + "Not allowed to set aggregate type '%s' for olap " + + "property key '%s'", this.aggregateType, this.name); } if (this.writeType == WriteType.OLAP_RANGE && !this.dataType.isNumber() && !this.dataType.isDate()) { throw new NotAllowException( - "Not allowed to set write type to OLAP_RANGE for " + - "property key '%s' with data type '%s'", - this.name, this.dataType); + "Not allowed to set write type to OLAP_RANGE for " + + "property key '%s' with data type '%s'", + this.name, this.dataType); } } } diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java index 2dfe409985..edd34c4715 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java @@ -26,26 +26,26 @@ import java.util.Set; import org.apache.commons.collections.CollectionUtils; - +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.tx.SchemaTransaction; -import org.apache.hugegraph.schema.PropertyKey; -import org.apache.hugegraph.schema.Userdata; -import org.apache.hugegraph.schema.VertexLabel; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.exception.NotAllowException; import org.apache.hugegraph.exception.NotFoundException; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.Userdata; +import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Action; import org.apache.hugegraph.type.define.IdStrategy; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; + import com.google.common.collect.ImmutableList; public class VertexLabelBuilder extends AbstractBuilder - implements VertexLabel.Builder { + implements VertexLabel.Builder { private Id id; private String name; @@ -105,7 +105,7 @@ public VertexLabel build() { vertexLabel.ttl(this.ttl); if (this.ttlStartTime != null) { vertexLabel.ttlStartTime(this.graph().propertyKey( - this.ttlStartTime).id()); + this.ttlStartTime).id()); } // Assign properties for (String key : this.properties) { @@ -157,6 +157,7 @@ public VertexLabel create() { * Check whether this has same properties with existedVertexLabel. * Only properties, primaryKeys, nullableKeys, enableLabelIndex are checked. * The id, idStrategy, checkExist, userdata are not checked. 
+ * * @param existedVertexLabel to be compared with * @return true if this has same properties with existedVertexLabel */ @@ -416,15 +417,15 @@ private void checkProperties(Action action) { case ELIMINATE: if (!this.properties.isEmpty()) { throw new NotAllowException( - "Not support to eliminate properties " + - "for vertex label currently"); + "Not support to eliminate properties " + + "for vertex label currently"); } break; case DELETE: break; default: throw new AssertionError(String.format( - "Unknown schema action '%s'", action)); + "Unknown schema action '%s'", action)); } } @@ -434,8 +435,8 @@ private void checkNullableKeys(Action action) { if (action == Action.ELIMINATE) { if (!this.nullableKeys.isEmpty()) { throw new NotAllowException( - "Not support to eliminate nullableKeys " + - "for vertex label currently"); + "Not support to eliminate nullableKeys " + + "for vertex label currently"); } return; } @@ -466,7 +467,7 @@ private void checkNullableKeys(Action action) { if (action == Action.APPEND) { Collection newAddedProps = CollectionUtils.subtract( - appendProps, originProps); + appendProps, originProps); E.checkArgument(this.nullableKeys.containsAll(newAddedProps), "The new added properties: %s must be nullable", newAddedProps); @@ -499,7 +500,7 @@ private void checkIdStrategy() { break; default: throw new AssertionError(String.format( - "Unknown id strategy '%s'", strategy)); + "Unknown id strategy '%s'", strategy)); } if (this.idStrategy == IdStrategy.PRIMARY_KEY) { this.checkPrimaryKeys(); @@ -531,18 +532,18 @@ private void checkPrimaryKeys() { private void checkStableVars() { if (!this.primaryKeys.isEmpty()) { throw new NotAllowException( - "Not allowed to update primary keys " + - "for vertex label '%s'", this.name); + "Not allowed to update primary keys " + + "for vertex label '%s'", this.name); } if (this.idStrategy != IdStrategy.DEFAULT) { throw new NotAllowException( - "Not allowed to update id strategy " + - "for vertex label '%s'", this.name); + "Not allowed to update id strategy " + + "for vertex label '%s'", this.name); } if (this.enableLabelIndex != null) { throw new NotAllowException( - "Not allowed to update enable_label_index " + - "for vertex label '%s'", this.name); + "Not allowed to update enable_label_index " + + "for vertex label '%s'", this.name); } } @@ -578,8 +579,8 @@ private void checkUserdata(Action action) { for (Map.Entry e : this.userdata.entrySet()) { if (e.getValue() == null) { throw new NotAllowException( - "Not allowed pass null userdata value when " + - "create or append edge label"); + "Not allowed pass null userdata value when " + + "create or append vertex label"); } } break; @@ -589,7 +590,7 @@ break; default: throw new AssertionError(String.format( - "Unknown schema action '%s'", action)); + "Unknown schema action '%s'", action)); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/security/HugeSecurityManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/security/HugeSecurityManager.java index e83959113d..7c4f33c546 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/security/HugeSecurityManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/security/HugeSecurityManager.java @@ -24,9 +24,9 @@ import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; -import org.apache.hugegraph.util.Log; import
com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -35,9 +35,9 @@ public class HugeSecurityManager extends SecurityManager { private static final String USER_DIR = System.getProperty("user.dir"); private static final String USER_DIR_IDE = - USER_DIR.endsWith("hugegraph-dist") ? - USER_DIR.substring(0, USER_DIR.length() - 15) : - null; + USER_DIR.endsWith("hugegraph-dist") ? + USER_DIR.substring(0, USER_DIR.length() - 15) : + null; private static final String GREMLIN_SERVER_WORKER = "gremlin-server-exec"; private static final String TASK_WORKER = "task-worker"; @@ -49,7 +49,6 @@ public class HugeSecurityManager extends SecurityManager { private static final Set DENIED_PERMISSIONS = ImmutableSet.of("setSecurityManager"); - private static final Set ACCEPT_CLASS_LOADERS = ImmutableSet.of( "groovy.lang.GroovyClassLoader", "sun.reflect.DelegatingClassLoader", @@ -132,7 +131,7 @@ public class HugeSecurityManager extends SecurityManager { public static void ignoreCheckedClass(String clazz) { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to add ignore check via Gremlin"); + "Not allowed to add ignore check via Gremlin"); } IGNORE_CHECKED_CLASSES.add(clazz); @@ -143,7 +142,7 @@ public void checkPermission(Permission permission) { if (DENIED_PERMISSIONS.contains(permission.getName()) && callFromGremlin()) { throw newSecurityException( - "Not allowed to access denied permission via Gremlin"); + "Not allowed to access denied permission via Gremlin"); } } @@ -152,7 +151,7 @@ public void checkPermission(Permission permission, Object context) { if (DENIED_PERMISSIONS.contains(permission.getName()) && callFromGremlin()) { throw newSecurityException( - "Not allowed to access denied permission via Gremlin"); + "Not allowed to access denied permission via Gremlin"); } } @@ -160,7 +159,7 @@ public void checkPermission(Permission permission, Object context) { public void checkCreateClassLoader() { if (!callFromAcceptClassLoaders() && callFromGremlin()) { throw newSecurityException( - "Not allowed to create class loader via Gremlin"); + "Not allowed to create class loader via Gremlin"); } super.checkCreateClassLoader(); } @@ -169,7 +168,7 @@ public void checkCreateClassLoader() { public void checkLink(String lib) { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to link library via Gremlin"); + "Not allowed to link library via Gremlin"); } super.checkLink(lib); } @@ -181,7 +180,7 @@ public void checkAccess(Thread thread) { !callFromBackendThread() && !callFromBackendHbase() && !callFromRaft() && !callFromSofaRpc() && !callFromIgnoreCheckedClass()) { throw newSecurityException( - "Not allowed to access thread via Gremlin"); + "Not allowed to access thread via Gremlin"); } super.checkAccess(thread); } @@ -194,7 +193,7 @@ public void checkAccess(ThreadGroup threadGroup) { !callFromRaft() && !callFromSofaRpc() && !callFromIgnoreCheckedClass()) { throw newSecurityException( - "Not allowed to access thread group via Gremlin"); + "Not allowed to access thread group via Gremlin"); } super.checkAccess(threadGroup); } @@ -203,7 +202,7 @@ public void checkAccess(ThreadGroup threadGroup) { public void checkExit(int status) { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to call System.exit() via Gremlin"); + "Not allowed to call System.exit() via Gremlin"); } super.checkExit(status); } @@ -212,7 +211,7 @@ public void checkExit(int status) { public void checkExec(String cmd) { if (callFromGremlin()) { throw 
newSecurityException( - "Not allowed to execute command via Gremlin"); + "Not allowed to execute command via Gremlin"); } super.checkExec(cmd); } @@ -233,7 +232,7 @@ public void checkRead(String file) { !callFromSnapshot() && !callFromRaft() && !callFromSofaRpc()) { throw newSecurityException( - "Not allowed to read file via Gremlin: %s", file); + "Not allowed to read file via Gremlin: %s", file); } super.checkRead(file); } @@ -242,7 +241,7 @@ public void checkRead(String file) { public void checkRead(String file, Object context) { if (callFromGremlin() && !callFromRaft() && !callFromSofaRpc()) { throw newSecurityException( - "Not allowed to read file via Gremlin: %s", file); + "Not allowed to read file via Gremlin: %s", file); } super.checkRead(file, context); } @@ -269,7 +268,7 @@ public void checkWrite(String file) { public void checkDelete(String file) { if (callFromGremlin() && !callFromSnapshot()) { throw newSecurityException( - "Not allowed to delete file via Gremlin"); + "Not allowed to delete file via Gremlin"); } super.checkDelete(file); } @@ -278,7 +277,7 @@ public void checkDelete(String file) { public void checkListen(int port) { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to listen socket via Gremlin"); + "Not allowed to listen socket via Gremlin"); } super.checkListen(port); } @@ -287,7 +286,7 @@ public void checkListen(int port) { public void checkAccept(String host, int port) { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to accept socket via Gremlin"); + "Not allowed to accept socket via Gremlin"); } super.checkAccept(host, port); } @@ -297,7 +296,7 @@ public void checkConnect(String host, int port) { if (callFromGremlin() && !callFromBackendSocket() && !callFromBackendHbase() && !callFromRaft() && !callFromSofaRpc()) { throw newSecurityException( - "Not allowed to connect socket via Gremlin"); + "Not allowed to connect socket via Gremlin"); } super.checkConnect(host, port); } @@ -306,7 +305,7 @@ public void checkConnect(String host, int port) { public void checkConnect(String host, int port, Object context) { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to connect socket via Gremlin"); + "Not allowed to connect socket via Gremlin"); } super.checkConnect(host, port, context); } @@ -332,7 +331,7 @@ public void checkMulticast(InetAddress maddr, byte ttl) { public void checkSetFactory() { if (callFromGremlin()) { throw newSecurityException( - "Not allowed to set socket factory via Gremlin"); + "Not allowed to set socket factory via Gremlin"); } super.checkSetFactory(); } @@ -342,7 +341,7 @@ public void checkPropertiesAccess() { if (callFromGremlin() && !callFromSofaRpc() && !callFromNewSecurityException()) { throw newSecurityException( - "Not allowed to access system properties via Gremlin"); + "Not allowed to access system properties via Gremlin"); } super.checkPropertiesAccess(); } @@ -354,7 +353,7 @@ public void checkPropertyAccess(String key) { !callFromSnapshot() && !callFromRaft() && !callFromSofaRpc()) { throw newSecurityException( - "Not allowed to access system property(%s) via Gremlin", key); + "Not allowed to access system property(%s) via Gremlin", key); } super.checkPropertyAccess(key); } @@ -398,12 +397,9 @@ private static SecurityException newSecurityException(String message, } private static boolean readGroovyInCurrentDir(String file) { - if (file != null && (USER_DIR != null && file.startsWith(USER_DIR) || - USER_DIR_IDE != null && file.startsWith(USER_DIR_IDE)) && - 
(file.endsWith(".class") || file.endsWith(".groovy"))) { - return true; - } - return false; + return file != null && (USER_DIR != null && file.startsWith(USER_DIR) || + USER_DIR_IDE != null && file.startsWith(USER_DIR_IDE)) && + (file.endsWith(".class") || file.endsWith(".groovy")); } private static boolean callFromGremlin() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/GraphType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/GraphType.java index 174c8e7de2..7cf5ee3b4d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/GraphType.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/GraphType.java @@ -21,4 +21,5 @@ import org.apache.hugegraph.type.Typeable; public interface GraphType extends Nameable, Typeable { + } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java index 76237add21..12b0e3fc71 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java @@ -27,7 +27,9 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.backend.serializer.BytesBuffer; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; @@ -35,16 +37,13 @@ import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.E; import org.apache.logging.log4j.util.Strings; import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.util.StringFactory; - -import org.apache.hugegraph.backend.serializer.BytesBuffer; -import org.apache.hugegraph.perf.PerfUtil.Watched; -import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyProperty; import com.google.common.collect.ImmutableList; @@ -463,6 +462,7 @@ public HugeVertex otherVertex() { /** * Clear properties of the edge, and set `removed` true + * * @return a new edge */ public HugeEdge prepareRemoved() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdgeProperty.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdgeProperty.java index 6f78c0da7f..f89e4f0cf4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdgeProperty.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdgeProperty.java @@ -47,7 +47,7 @@ public void remove() { assert this.owner instanceof HugeEdge; EdgeLabel edgeLabel = ((HugeEdge) this.owner).schemaLabel(); E.checkArgument(edgeLabel.nullableKeys().contains( - this.propertyKey().id()), + this.propertyKey().id()), "Can't remove non-null edge property '%s'", this); 
this.owner.graph().removeEdgeProperty(this); } @@ -60,6 +60,7 @@ public boolean equals(Object obj) { return ElementHelper.areEqual(this, obj); } + @Override public int hashCode() { return ElementHelper.hashCode(this); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeElement.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeElement.java index a7c93333fe..f727dc2fcb 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeElement.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeElement.java @@ -46,7 +46,6 @@ import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.collection.CollectionFactory; import org.apache.tinkerpop.gremlin.structure.Element; -import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; import org.eclipse.collections.api.iterator.IntIterator; @@ -55,7 +54,7 @@ public abstract class HugeElement implements Element, GraphType, Idfiable, Comparable { private static final MutableIntObjectMap> EMPTY_MAP = - CollectionFactory.newIntObjectMap(); + CollectionFactory.newIntObjectMap(); private static final int MAX_PROPERTIES = BytesBuffer.UINT16_MAX; private final HugeGraph graph; @@ -329,7 +328,7 @@ public HugeProperty addProperty(PropertyKey pkey, V value, } @Watched(prefix = "element") - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) private HugeProperty addProperty(PropertyKey pkey, V value, Supplier> supplier) { assert pkey.cardinality().multiple(); @@ -372,7 +371,7 @@ protected void copyProperties(HugeElement element) { this.properties = EMPTY_MAP; } else { this.properties = CollectionFactory.newIntObjectMap( - element.properties); + element.properties); } this.propLoaded = true; } @@ -414,8 +413,9 @@ public int compareTo(HugeElement o) { /** * Classify parameter list(pairs) from call request + * * @param keyValues The property key-value pair of the vertex or edge - * @return Key-value pairs that are classified and processed + * @return Key-value pairs that are classified and processed */ @Watched(prefix = "element") public static final ElementKeys classifyKeys(Object... 
keyValues) { @@ -488,8 +488,8 @@ protected static Id getIdValue(Object idValue) { // Throw if error type throw new UnsupportedOperationException(String.format( - "Invalid element id: %s(%s)", - idValue, idValue.getClass().getSimpleName())); + "Invalid element id: %s(%s)", + idValue, idValue.getClass().getSimpleName())); } @Watched(prefix = "element") diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeFeatures.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeFeatures.java index ec1c216a19..f8bdb8c75c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeFeatures.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeFeatures.java @@ -67,7 +67,7 @@ public String toString() { public class HugeGraphFeatures implements GraphFeatures { private final VariableFeatures variableFeatures = - new HugeVariableFeatures(); + new HugeVariableFeatures(); @Override public boolean supportsConcurrentAccess() { @@ -157,12 +157,12 @@ public boolean willAllowId(Object id) { } public class HugeVariableFeatures extends HugeDataTypeFeatures - implements VariableFeatures { + implements VariableFeatures { } public class HugeVertexPropertyFeatures extends HugeDataTypeFeatures - implements VertexPropertyFeatures { + implements VertexPropertyFeatures { @Override public boolean supportsRemoveProperty() { @@ -206,7 +206,7 @@ public boolean supportsCustomIds() { } public class HugeEdgePropertyFeatures extends HugeDataTypeFeatures - implements EdgePropertyFeatures { + implements EdgePropertyFeatures { @Override public boolean supportsMapValues() { @@ -231,10 +231,10 @@ public boolean supportsUniformListValues() { } public class HugeVertexFeatures extends HugeElementFeatures - implements VertexFeatures { + implements VertexFeatures { private final VertexPropertyFeatures vertexPropertyFeatures = - new HugeVertexPropertyFeatures(); + new HugeVertexPropertyFeatures(); @Override public boolean supportsUserSuppliedIds() { @@ -280,15 +280,15 @@ public boolean supportsDefaultLabel() { public String defaultLabel() { return HugeFeatures.this.graph - .option(CoreOptions.VERTEX_DEFAULT_LABEL); + .option(CoreOptions.VERTEX_DEFAULT_LABEL); } } public class HugeEdgeFeatures extends HugeElementFeatures - implements EdgeFeatures { + implements EdgeFeatures { private final EdgePropertyFeatures edgePropertyFeatures = - new HugeEdgePropertyFeatures(); + new HugeEdgePropertyFeatures(); @Override public EdgePropertyFeatures properties() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java index a942ae07dc..4d43840934 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java @@ -28,11 +28,11 @@ import org.apache.hugegraph.backend.id.Id.IdType; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.id.SplicingIdGenerator; +import org.apache.hugegraph.backend.serializer.BytesBuffer; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.SchemaElement; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.DataType; -import org.apache.hugegraph.backend.serializer.BytesBuffer; import org.apache.hugegraph.util.E; import 
org.apache.hugegraph.util.HashUtil; import org.apache.hugegraph.util.InsertionOrderUtil; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeProperty.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeProperty.java index 85990d2af0..cc96bd27b8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeProperty.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeProperty.java @@ -22,12 +22,11 @@ import org.apache.hugegraph.backend.id.SplicingIdGenerator; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; import org.apache.tinkerpop.gremlin.structure.util.StringFactory; -import org.apache.hugegraph.util.E; - public abstract class HugeProperty implements Property, GraphType { protected final HugeElement owner; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java index b0c9e2f310..f81d8ab99e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java @@ -291,7 +291,7 @@ public HugeEdge constructEdge(String label, HugeVertex vertex, EdgeLabel edgeLabel = this.graph().edgeLabel(label); // Check link E.checkArgument(edgeLabel.checkLinkEqual(this.schemaLabel().id(), - vertex.schemaLabel().id()), + vertex.schemaLabel().id()), "Undefined link of edge label '%s': '%s' -> '%s'", label, this.label(), vertex.label()); // Check sortKeys @@ -303,14 +303,14 @@ public HugeEdge constructEdge(String label, HugeVertex vertex, // Check whether passed all non-null props @SuppressWarnings("unchecked") Collection nonNullKeys = CollectionUtils.subtract( - edgeLabel.properties(), - edgeLabel.nullableKeys()); + edgeLabel.properties(), + edgeLabel.nullableKeys()); if (!new HashSet<>(keys).containsAll(nonNullKeys)) { @SuppressWarnings("unchecked") Collection missed = CollectionUtils.subtract(nonNullKeys, keys); E.checkArgument(false, "All non-null property keys: %s " + - "of edge label '%s' must be set, " + - "but missed keys: %s", + "of edge label '%s' must be set, " + + "but missed keys: %s", this.graph().mapPkId2Name(nonNullKeys), edgeLabel.name(), this.graph().mapPkId2Name(missed)); @@ -327,6 +327,7 @@ public HugeEdge constructEdge(String label, HugeVertex vertex, /** * Add edge with direction OUT + * * @param edge the out edge */ @Watched @@ -342,6 +343,7 @@ public void addOutEdge(HugeEdge edge) { /** * Add edge with direction IN + * * @param edge the in edge */ @Watched @@ -599,6 +601,7 @@ public boolean valid() { /** * Clear edges/properties of the cloned vertex, and set `removed` true + * * @return a new vertex */ public HugeVertex prepareRemoved() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertexProperty.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertexProperty.java index de6c1ee837..4cef519799 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertexProperty.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertexProperty.java @@ -19,19 +19,18 @@ import java.util.Iterator; +import org.apache.hugegraph.exception.NotSupportException; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Element; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; -import org.apache.hugegraph.exception.NotSupportException; -import org.apache.hugegraph.util.E; - public class HugeVertexProperty extends HugeProperty - implements VertexProperty { + implements VertexProperty { public HugeVertexProperty(HugeElement owner, PropertyKey key, V value) { super(owner, key, value); @@ -59,7 +58,7 @@ public void remove() { assert this.owner instanceof HugeVertex; VertexLabel vertexLabel = ((HugeVertex) this.owner).schemaLabel(); E.checkArgument(vertexLabel.nullableKeys().contains( - this.propertyKey().id()), + this.propertyKey().id()), "Can't remove non-null vertex property '%s'", this); this.owner.graph().removeVertexProperty(this); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/EphemeralJobQueue.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/EphemeralJobQueue.java index bfb00bff96..c7c840a882 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/EphemeralJobQueue.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/EphemeralJobQueue.java @@ -185,7 +185,8 @@ public Object execute() throws Exception { return result; } - private Object executeBatchJob(List> jobs, Object prevResult) throws Exception { + private Object executeBatchJob(List> jobs, Object prevResult) throws + Exception { GraphTransaction graphTx = this.params().systemTransaction(); GraphTransaction systemTx = this.params().graphTransaction(); Object result = prevResult; @@ -236,6 +237,7 @@ public Object call() throws Exception { } public interface Reduce { - T reduce(T t1, T t2); + + T reduce(T t1, T t2); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeServerInfo.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeServerInfo.java index c3fffecce6..a304b4f75b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeServerInfo.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeServerInfo.java @@ -24,6 +24,8 @@ import java.util.List; import java.util.Map; +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.schema.IndexLabel; @@ -35,21 +37,18 @@ import org.apache.hugegraph.type.define.DataType; import org.apache.hugegraph.type.define.NodeRole; import org.apache.hugegraph.type.define.SerialEnum; +import org.apache.hugegraph.util.DateUtil; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; -import org.apache.hugegraph.HugeGraph; -import 
org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.util.DateUtil; -import org.apache.hugegraph.util.E; - public class HugeServerInfo { // Unit millisecond private static final long EXPIRED_INTERVAL = - TaskManager.SCHEDULE_PERIOD * 10; + TaskManager.SCHEDULE_PERIOD * 10; private Id id; private NodeRole role; @@ -202,7 +201,7 @@ public Map asMap() { public static HugeServerInfo fromVertex(Vertex vertex) { HugeServerInfo serverInfo = new HugeServerInfo((Id) vertex.id()); for (Iterator> iter = vertex.properties(); - iter.hasNext();) { + iter.hasNext(); ) { VertexProperty prop = iter.next(); serverInfo.property(prop.key(), prop.value()); } @@ -213,11 +212,8 @@ public boolean suitableFor(HugeTask task, long now) { if (task.computer() != this.role.computer()) { return false; } - if (this.updateTime.getTime() + EXPIRED_INTERVAL < now || - this.load() + task.load() > this.maxLoad) { - return false; - } - return true; + return this.updateTime.getTime() + EXPIRED_INTERVAL >= now && + this.load() + task.load() <= this.maxLoad; } public static Schema schema(HugeGraphParams graph) { @@ -288,7 +284,7 @@ private String[] initProperties() { public boolean existVertexLabel(String label) { return this.graph.schemaTransaction() - .getVertexLabel(label) != null; + .getVertexLabel(label) != null; } @SuppressWarnings("unused") diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java index 42e3d8923b..d716a0f00b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java @@ -29,28 +29,29 @@ import java.util.stream.Collector; import java.util.stream.Collectors; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.serializer.BytesBuffer; import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.exception.LimitExceedException; +import org.apache.hugegraph.exception.NotFoundException; +import org.apache.hugegraph.job.ComputerJob; +import org.apache.hugegraph.job.EphemeralJob; import org.apache.hugegraph.type.define.SerialEnum; -import org.apache.hugegraph.util.*; +import org.apache.hugegraph.util.Blob; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.InsertionOrderUtil; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.hugegraph.util.Log; +import org.apache.hugegraph.util.StringEncoding; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; -import org.apache.hugegraph.util.Blob; -import org.apache.hugegraph.util.JsonUtil; -import org.apache.hugegraph.util.StringEncoding; import org.slf4j.Logger; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraph; -import org.apache.hugegraph.backend.serializer.BytesBuffer; -import org.apache.hugegraph.exception.LimitExceedException; -import org.apache.hugegraph.exception.NotFoundException; -import org.apache.hugegraph.job.ComputerJob; -import org.apache.hugegraph.job.EphemeralJob; - public class HugeTask extends FutureTask { private static final Logger LOG = Log.logger(HugeTask.class); @@ -324,9 
+325,7 @@ public boolean fail(Throwable e) { LOG.warn("An exception occurred when running task: {}", this.id(), e); // Update status to FAILED if exception occurred(not interrupted) - if (this.result(TaskStatus.FAILED, e.toString())) { - return true; - } + return this.result(TaskStatus.FAILED, e.toString()); } return false; } @@ -347,7 +346,7 @@ protected void done() { LOG.error("An exception occurred when calling done()", e); } finally { StandardTaskScheduler scheduler = (StandardTaskScheduler) - this.scheduler(); + this.scheduler(); scheduler.taskDone(this); } } @@ -392,14 +391,14 @@ protected boolean checkDependenciesSuccess() { return false; } else if (task.status() == TaskStatus.CANCELLED) { this.result(TaskStatus.CANCELLED, String.format( - "Cancelled due to dependent task '%s' cancelled", - dependency)); + "Cancelled due to dependent task '%s' cancelled", + dependency)); this.done(); return false; } else if (task.status() == TaskStatus.FAILED) { this.result(TaskStatus.FAILED, String.format( - "Failed due to dependent task '%s' failed", - dependency)); + "Failed due to dependent task '%s' failed", + dependency)); this.done(); return false; } @@ -421,7 +420,7 @@ protected synchronized boolean status(TaskStatus status) { if (!this.completed()) { assert this.status.code() < status.code() || status == TaskStatus.RESTORING : - this.status + " => " + status + " (task " + this.id + ")"; + this.status + " => " + status + " (task " + this.id + ")"; this.status = status; return true; } @@ -466,7 +465,7 @@ protected void property(String key, Object value) { @SuppressWarnings("unchecked") Set values = (Set) value; this.dependencies = values.stream().map(IdGenerator::of) - .collect(toOrderSet()); + .collect(toOrderSet()); break; case P.INPUT: this.input = StringEncoding.decompress(((Blob) value).bytes(), @@ -535,7 +534,7 @@ protected synchronized Object[] asArray() { if (this.dependencies != null) { list.add(P.DEPENDENCIES); list.add(this.dependencies.stream().map(Id::asLong) - .collect(toOrderSet())); + .collect(toOrderSet())); } if (this.input != null) { @@ -587,7 +586,7 @@ public synchronized Map asMap(boolean withDetails) { } if (this.dependencies != null) { Set value = this.dependencies.stream().map(Id::asLong) - .collect(toOrderSet()); + .collect(toOrderSet()); map.put(Hidden.unHide(P.DEPENDENCIES), value); } @@ -620,7 +619,7 @@ public static HugeTask fromVertex(Vertex vertex) { HugeTask task = new HugeTask<>((Id) vertex.id(), null, callable); for (Iterator> iter = vertex.properties(); - iter.hasNext();) { + iter.hasNext(); ) { VertexProperty prop = iter.next(); task.property(prop.key(), prop.value()); } @@ -647,8 +646,8 @@ private void checkPropertySize(int propertyLength, String propertyName) { if (propertyLength > propertyLimit) { throw new LimitExceedException( - "Task %s size %s exceeded limit %s bytes", - P.unhide(propertyName), propertyLength, propertyLimit); + "Task %s size %s exceeded limit %s bytes", + P.unhide(propertyName), propertyLength, propertyLimit); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java index bca4e46c35..7290496f42 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java @@ -118,7 +118,7 @@ public synchronized void initServerInfo(GlobalMasterInfo nodeInfo) 
{ while (servers.hasNext()) { existed = servers.next(); E.checkArgument(!existed.role().master() || !existed.alive(), - "Already existed master '%s' in current cluster", + "Already existed master '%s' in current cluster", existed.id()); } if (page != null) { @@ -324,7 +324,7 @@ private int save(Collection serverInfos) { private V call(Callable callable) { assert !Thread.currentThread().getName().startsWith( - "server-info-db-worker") : "can't call by itself"; + "server-info-db-worker") : "can't call by itself"; try { // Pass context for db thread callable = new TaskManager.ContextCallable<>(callable); @@ -391,7 +391,7 @@ protected void updateServerInfos(Collection serverInfos) { protected Collection allServerInfos() { Iterator infos = this.serverInfos(NO_LIMIT, null); try (ListIterator iter = new ListIterator<>( - MAX_SERVERS, infos)) { + MAX_SERVERS, infos)) { return iter.list(); } catch (Exception e) { throw new HugeException("Failed to close server info iterator", e); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java index bc50fce5b4..48a7508206 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java @@ -149,7 +149,7 @@ public void restoreTasks() { do { Iterator> iter; for (iter = this.findTask(status, PAGE_SIZE, page); - iter.hasNext();) { + iter.hasNext(); ) { HugeTask task = iter.next(); if (selfServer.equals(task.server())) { taskList.add(task); @@ -160,7 +160,7 @@ public void restoreTasks() { } } while (page != null); } - for (HugeTask task : taskList){ + for (HugeTask task : taskList) { LOG.info("restore task {}", task); this.restore(task); } @@ -328,7 +328,7 @@ protected synchronized void scheduleTasksOnMaster() { } HugeServerInfo server = this.serverManager().pickWorkerNode( - serverInfos, task); + serverInfos, task); if (server == null) { LOG.info("The master can't find suitable servers to " + "execute task '{}', wait for next schedule", @@ -443,7 +443,7 @@ protected void remove(HugeTask task) { HugeTask delTask = this.tasks.remove(task.id()); if (delTask != null && delTask != task) { LOG.warn("Task '{}' may be inconsistent status {}(expect {})", - task.id(), task.status(), delTask.status()); + task.id(), task.status(), delTask.status()); } assert delTask == null || delTask.completed() || delTask.cancelling() || delTask.isCancelled() : delTask; @@ -528,7 +528,7 @@ public Iterator> tasks(TaskStatus status, } public HugeTask findTask(Id id) { - HugeTask result = this.call(() -> { + HugeTask result = this.call(() -> { Iterator vertices = this.tx().queryVertices(id); Vertex vertex = QueryResults.one(vertices); if (vertex == null) { @@ -595,13 +595,13 @@ public HugeTask delete(Id id) { @Override public HugeTask waitUntilTaskCompleted(Id id, long seconds) - throws TimeoutException { + throws TimeoutException { return this.waitUntilTaskCompleted(id, seconds, QUERY_INTERVAL); } @Override public HugeTask waitUntilTaskCompleted(Id id) - throws TimeoutException { + throws TimeoutException { // This method is just used by tests long timeout = this.graph.configuration() .get(CoreOptions.TASK_WAIT_TIMEOUT); @@ -610,10 +610,10 @@ public HugeTask waitUntilTaskCompleted(Id id) private HugeTask waitUntilTaskCompleted(Id id, long seconds, long intervalMs) - throws 
TimeoutException { + throws TimeoutException { long passes = seconds * 1000 / intervalMs; HugeTask task = null; - for (long pass = 0;; pass++) { + for (long pass = 0; ; pass++) { try { task = this.task(id); } catch (NotFoundException e) { @@ -635,15 +635,15 @@ private HugeTask waitUntilTaskCompleted(Id id, long seconds, sleep(intervalMs); } throw new TimeoutException(String.format( - "Task '%s' was not completed in %s seconds", id, seconds)); + "Task '%s' was not completed in %s seconds", id, seconds)); } @Override public void waitUntilAllTasksCompleted(long seconds) - throws TimeoutException { + throws TimeoutException { long passes = seconds * 1000 / QUERY_INTERVAL; int taskSize; - for (long pass = 0;; pass++) { + for (long pass = 0; ; pass++) { taskSize = this.pendingTasks(); if (taskSize == 0) { sleep(QUERY_INTERVAL); @@ -655,8 +655,8 @@ public void waitUntilAllTasksCompleted(long seconds) sleep(QUERY_INTERVAL); } throw new TimeoutException(String.format( - "There are still %s incomplete tasks after %s seconds", - taskSize, seconds)); + "There are still %s incomplete tasks after %s seconds", + taskSize, seconds)); } @Override @@ -711,7 +711,7 @@ private V call(Runnable runnable) { private V call(Callable callable) { assert !Thread.currentThread().getName().startsWith( - "task-db-worker") : "can't call by itself"; + "task-db-worker") : "can't call by itself"; try { // Pass task context for db thread callable = new ContextCallable<>(callable); @@ -804,7 +804,7 @@ public void initSchema() { private boolean existVertexLabel(String label) { return this.params().schemaTransaction() - .getVertexLabel(label) != null; + .getVertexLabel(label) != null; } private String[] initProperties() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskCallable.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskCallable.java index b19fb796da..ec066fbc04 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskCallable.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskCallable.java @@ -21,14 +21,14 @@ import java.util.Set; import java.util.concurrent.Callable; -import org.apache.tinkerpop.gremlin.structure.Transaction; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Transaction; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableSet; public abstract class TaskCallable implements Callable { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java index 14e91d301c..144387949b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java @@ -45,7 +45,7 @@ public final class TaskManager { public static final String TASK_WORKER = TASK_WORKER_PREFIX + "-%d"; public static final String TASK_DB_WORKER = "task-db-worker-%d"; public static final String SERVER_INFO_DB_WORKER = - "server-info-db-worker-%d"; + "server-info-db-worker-%d"; public static final String TASK_SCHEDULER = "task-scheduler-%d"; protected static final long SCHEDULE_PERIOD = 1000L; // unit ms @@ -73,12 
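The waitUntilTaskCompleted and waitUntilAllTasksCompleted hunks above follow the same bounded-polling pattern: derive the number of passes from the timeout and the poll interval, check once per pass, sleep between passes, and throw TimeoutException once the passes run out. A self-contained sketch of that pattern, with a BooleanSupplier standing in for the scheduler's task lookup (the names here are illustrative assumptions, not the scheduler's API):

import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;

public class PollingWaitSketch {

    // Poll `condition` every intervalMs until it is true or `seconds` elapse
    static void waitUntil(BooleanSupplier condition, long seconds, long intervalMs)
                          throws TimeoutException, InterruptedException {
        long passes = seconds * 1000 / intervalMs;
        for (long pass = 0; pass <= passes; pass++) {
            if (condition.getAsBoolean()) {
                return;
            }
            Thread.sleep(intervalMs);
        }
        throw new TimeoutException(String.format(
                "Condition was not met in %s seconds", seconds));
    }

    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        // Completes once ~300 ms have elapsed, polled every 100 ms
        waitUntil(() -> System.currentTimeMillis() - start > 300, 2, 100);
        System.out.println("completed");
    }
}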
+73,12 @@ private TaskManager(int pool) { this.taskExecutor = ExecutorUtil.newFixedThreadPool(pool, TASK_WORKER); // For save/query task state, just one thread is ok this.taskDbExecutor = ExecutorUtil.newFixedThreadPool( - 1, TASK_DB_WORKER); + 1, TASK_DB_WORKER); this.serverInfoDbExecutor = ExecutorUtil.newFixedThreadPool( - 1, SERVER_INFO_DB_WORKER); + 1, SERVER_INFO_DB_WORKER); // For schedule task to run, just one thread is ok this.schedulerExecutor = ExecutorUtil.newPausableScheduledThreadPool( - 1, TASK_SCHEDULER); + 1, TASK_SCHEDULER); // Start after 10x period time waiting for HugeGraphServer startup this.schedulerExecutor.scheduleWithFixedDelay(this::scheduleOrExecuteJob, 10 * SCHEDULE_PERIOD, @@ -90,8 +90,8 @@ public void addScheduler(HugeGraphParams graph) { E.checkArgumentNotNull(graph, "The graph can't be null"); TaskScheduler scheduler = new StandardTaskScheduler(graph, - this.taskExecutor, this.taskDbExecutor, - this.serverInfoDbExecutor); + this.taskExecutor, this.taskDbExecutor, + this.serverInfoDbExecutor); this.schedulers.put(graph, scheduler); } @@ -171,7 +171,7 @@ public TaskScheduler getScheduler(HugeGraphParams graph) { public ServerInfoManager getServerInfoManager(HugeGraphParams graph) { StandardTaskScheduler scheduler = (StandardTaskScheduler) - this.getScheduler(graph); + this.getScheduler(graph); if (scheduler == null) { return null; } @@ -275,7 +275,7 @@ public void onAsRoleWorker() { protected void notifyNewTask(HugeTask task) { Queue queue = ((ThreadPoolExecutor) this.schedulerExecutor) - .getQueue(); + .getQueue(); if (queue.size() <= 1) { /* * Notify to schedule tasks initiatively when have new task diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java index c65008d441..28eb232a2f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java @@ -22,8 +22,8 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; -import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; public interface TaskScheduler { @@ -53,13 +53,13 @@ Iterator> tasks(TaskStatus status, boolean close(); HugeTask waitUntilTaskCompleted(Id id, long seconds) - throws TimeoutException; + throws TimeoutException; HugeTask waitUntilTaskCompleted(Id id) - throws TimeoutException; + throws TimeoutException; void waitUntilAllTasksCompleted(long seconds) - throws TimeoutException; + throws TimeoutException; void checkRequirement(String op); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java index 0647f91592..14613ee8c6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java @@ -21,6 +21,7 @@ import java.util.Set; import org.apache.hugegraph.type.define.SerialEnum; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -41,10 +42,10 @@ public enum TaskStatus implements SerialEnum { // NOTE: order is important(RESTORING > RUNNING > QUEUED) when restoring public static final List PENDING_STATUSES = 
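The TaskManager constructor above dedicates a single-threaded executor to task-state persistence (and another to server-info persistence) and drives scheduling through a scheduled executor whose first run is delayed by ten periods so HugeGraphServer can finish starting. A rough JDK-only sketch of that wiring, using plain java.util.concurrent.Executors in place of HugeGraph's ExecutorUtil and a printed message in place of scheduleOrExecuteJob (both substitutions are assumptions for illustration):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SchedulerWiringSketch {

    private static final long SCHEDULE_PERIOD_MS = 1000L;

    public static void main(String[] args) {
        // One thread is enough to serialize save/query of task state
        ExecutorService taskDbExecutor = Executors.newSingleThreadExecutor();

        // One thread schedules tasks; the first run is delayed to let the server start
        ScheduledExecutorService schedulerExecutor =
                Executors.newSingleThreadScheduledExecutor();
        schedulerExecutor.scheduleWithFixedDelay(
                () -> System.out.println("schedule or execute pending jobs"),
                10 * SCHEDULE_PERIOD_MS, SCHEDULE_PERIOD_MS, TimeUnit.MILLISECONDS);

        // In a real service these are shut down on close; here we stop at once so the demo exits
        taskDbExecutor.shutdown();
        schedulerExecutor.shutdown();
    }
}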
ImmutableList.of( - TaskStatus.RESTORING, TaskStatus.RUNNING, TaskStatus.QUEUED); + TaskStatus.RESTORING, TaskStatus.RUNNING, TaskStatus.QUEUED); public static final Set COMPLETED_STATUSES = ImmutableSet.of( - TaskStatus.SUCCESS, TaskStatus.CANCELLED, TaskStatus.FAILED); + TaskStatus.SUCCESS, TaskStatus.CANCELLED, TaskStatus.FAILED); private byte status; private String name; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java index 3ef81e538c..1b9defac46 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java @@ -25,13 +25,12 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.QueryResults; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.iterator.FilterIterator; import org.apache.hugegraph.iterator.FlatMapperIterator; import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; public class CountTraverser extends HugeTraverser { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java index 53ced03c67..e75d016359 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java @@ -947,6 +947,7 @@ public void append(Id current) { } public static class EdgeRecord { + private final Map edgeMap; private final ObjectIntMapping idMapping; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/NeighborRankTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/NeighborRankTraverser.java index e5f99b4037..3e915c7dd9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/NeighborRankTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/NeighborRankTraverser.java @@ -22,17 +22,16 @@ import java.util.Map; import java.util.Set; -import jakarta.ws.rs.core.MultivaluedMap; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.structure.Edge; - -import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.OrderLimitMap; +import org.apache.tinkerpop.gremlin.structure.Edge; + +import jakarta.ws.rs.core.MultivaluedMap; public class NeighborRankTraverser extends HugeTraverser { @@ -162,7 +161,7 @@ private boolean belongToPrevLayers(List ranks, Id target, Ranks prevLayerRanks = 
ranks.get(i); if (prevLayerRanks.containsKey(target)) { Set nodes = prevLayerNodes.computeIfAbsent( - i, HugeTraverser::newSet); + i, HugeTraverser::newSet); nodes.add(target); return true; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java index 40fe914d49..44f9cf8692 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java @@ -49,7 +49,7 @@ import com.google.common.base.Objects; public abstract class OltpTraverser extends HugeTraverser - implements AutoCloseable { + implements AutoCloseable { private static final String EXECUTOR_NAME = "oltp"; private static Consumers.ExecutorPool executors; @@ -149,7 +149,7 @@ protected void traverseIdsByBfs(Iterator vertices, long capacity, Consumer consumer) { List labels = label == null ? Collections.emptyList() : - Collections.singletonList(label); + Collections.singletonList(label); OneStepEdgeIterConsumer edgeIterConsumer = new OneStepEdgeIterConsumer(consumer, capacity); EdgesIterator edgeIter = edgesOfVertices(vertices, dir, labels, degree); @@ -248,7 +248,7 @@ protected boolean match(Element elem, String key, Object value) { } public static class ConcurrentMultiValuedMap - extends ConcurrentHashMap> { + extends ConcurrentHashMap> { private static final long serialVersionUID = -7249946839643493614L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PersonalRankTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PersonalRankTraverser.java index 1a1e82cad9..972eff7c7a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PersonalRankTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PersonalRankTraverser.java @@ -27,11 +27,10 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.VertexLabel; -import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; - import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; public class PersonalRankTraverser extends HugeTraverser { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PredictionTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PredictionTraverser.java index 4a3df4d643..4b2cb69e57 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PredictionTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PredictionTraverser.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; + import com.google.common.collect.ImmutableList; public class PredictionTraverser extends OltpTraverser { diff --git 
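OltpTraverser's nested ConcurrentMultiValuedMap above extends ConcurrentHashMap so that each key maps to a list of values which can be appended concurrently. A minimal sketch of the same idea built by composition rather than inheritance (an illustration under that assumption, not the class's actual code):

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;

// Minimal sketch: a concurrent map from key to a list of values
public class MultiValuedMapSketch<K, V> {

    private final Map<K, List<V>> map = new ConcurrentHashMap<>();

    // Append a value to the key's list, creating the list on first use
    public void add(K key, V value) {
        this.map.computeIfAbsent(key, k -> new CopyOnWriteArrayList<>())
                .add(value);
    }

    public List<V> get(K key) {
        return this.map.get(key);
    }

    public static void main(String[] args) {
        MultiValuedMapSketch<String, Integer> m = new MultiValuedMapSketch<>();
        m.add("a", 1);
        m.add("a", 2);
        System.out.println(m.get("a")); // [1, 2]
    }
}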
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java index 4e9b88ec1b..a34f87289d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java @@ -198,8 +198,9 @@ public PathSet forward(boolean all) { this.edgeResults.addEdge(source, target, edge); PathSet paths = this.pathResults.findPath(target, - t -> !this.superNode(t, this.direction), - all, false); + t -> !this.superNode(t, + this.direction), + all, false); if (paths.isEmpty()) { continue; @@ -244,8 +245,8 @@ public PathSet backward(boolean all) { this.edgeResults.addEdge(source, target, edge); PathSet paths = this.pathResults.findPath(target, - t -> !this.superNode(t, opposite), - all, false); + t -> !this.superNode(t, opposite), + all, false); if (paths.isEmpty()) { continue; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/AbstractRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/AbstractRecords.java index 7b94454cb4..74f3db5ffa 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/AbstractRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/AbstractRecords.java @@ -19,12 +19,12 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.hugegraph.util.collection.ObjectIntMapping; -import org.apache.hugegraph.util.collection.ObjectIntMappingFactory; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.traversal.algorithm.records.record.Record; import org.apache.hugegraph.traversal.algorithm.records.record.RecordFactory; import org.apache.hugegraph.traversal.algorithm.records.record.RecordType; +import org.apache.hugegraph.util.collection.ObjectIntMapping; +import org.apache.hugegraph.util.collection.ObjectIntMappingFactory; public abstract class AbstractRecords implements Records { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/DoubleWayMultiPathsRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/DoubleWayMultiPathsRecords.java index 4600f0790a..33224dc92c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/DoubleWayMultiPathsRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/DoubleWayMultiPathsRecords.java @@ -22,14 +22,14 @@ import java.util.Stack; import org.apache.commons.collections.CollectionUtils; - import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.util.collection.IntIterator; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.traversal.algorithm.HugeTraverser.Path; import org.apache.hugegraph.traversal.algorithm.HugeTraverser.PathSet; import org.apache.hugegraph.traversal.algorithm.records.record.Record; import org.apache.hugegraph.traversal.algorithm.records.record.RecordType; +import org.apache.hugegraph.util.collection.IntIterator; + import 
com.google.common.collect.Lists; public abstract class DoubleWayMultiPathsRecords extends AbstractRecords { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java index f13bcbce6a..bd637ce1d4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java @@ -97,7 +97,7 @@ public void addFullPath(List edges) { if (this.sourceCode == targetCode) { break; } - + this.addPathToRecord(sourceCode, targetCode, record); sourceCode = targetCode; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/record/SyncRecord.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/record/SyncRecord.java index 4a113fb4ea..c743ed5a49 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/record/SyncRecord.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/record/SyncRecord.java @@ -31,7 +31,7 @@ public SyncRecord(Record record) { public SyncRecord(Record record, Object newLock) { if (record == null) { throw new IllegalArgumentException( - "Cannot create a SyncRecord on a null record"); + "Cannot create a SyncRecord on a null record"); } else { this.record = record; this.lock = newLock == null ? this : newLock; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/EdgeStep.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/EdgeStep.java index 06cfd66066..caff267b7d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/EdgeStep.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/EdgeStep.java @@ -28,12 +28,12 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.EdgeLabel; -import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.optimize.TraversalUtil; +import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/RepeatEdgeStep.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/RepeatEdgeStep.java index 29a3572c18..1ab5fb1e0e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/RepeatEdgeStep.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/RepeatEdgeStep.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.type.define.Directions; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/WeightedEdgeStep.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/WeightedEdgeStep.java index 81b3cffd33..b288420ff2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/WeightedEdgeStep.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/WeightedEdgeStep.java @@ -28,6 +28,7 @@ import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/ConcurrentTraverseStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/ConcurrentTraverseStrategy.java index 2110df8f80..235a3c2d98 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/ConcurrentTraverseStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/ConcurrentTraverseStrategy.java @@ -29,7 +29,7 @@ import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; public class ConcurrentTraverseStrategy extends OltpTraverser - implements TraverseStrategy { + implements TraverseStrategy { public ConcurrentTraverseStrategy(HugeGraph graph) { super(graph); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/SingleTraverseStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/SingleTraverseStrategy.java index a3bb7ada50..8516645cdd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/SingleTraverseStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/SingleTraverseStrategy.java @@ -22,16 +22,16 @@ import java.util.Set; import java.util.function.BiConsumer; -import jakarta.ws.rs.core.MultivaluedMap; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.OltpTraverser; +import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; + +import jakarta.ws.rs.core.MultivaluedMap; public class SingleTraverseStrategy extends OltpTraverser - implements TraverseStrategy { + implements TraverseStrategy { public SingleTraverseStrategy(HugeGraph graph) { super(graph); @@ -65,7 +65,7 @@ public void addNode(Map> vertices, Id id, Node node) { public void addNewVerticesToAll(Map> newVertices, Map> targets) { MultivaluedMap vertices = - (MultivaluedMap) targets; + (MultivaluedMap) targets; for (Map.Entry> entry : newVertices.entrySet()) { vertices.addAll(entry.getKey(), entry.getValue()); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/TraverseStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/TraverseStrategy.java index e3c21cfa63..7440133038 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/TraverseStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/strategy/TraverseStrategy.java @@ -24,8 +24,8 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; +import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; public interface TraverseStrategy { @@ -44,6 +44,6 @@ void addNewVerticesToAll(Map> newVertices, static TraverseStrategy create(boolean concurrent, HugeGraph graph) { return concurrent ? new ConcurrentTraverseStrategy(graph) : - new SingleTraverseStrategy(graph); + new SingleTraverseStrategy(graph); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStep.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStep.java index 8bf2e84644..403bf5be83 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStep.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStep.java @@ -20,6 +20,7 @@ import java.util.NoSuchElementException; import java.util.Objects; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.process.traversal.Step; import org.apache.tinkerpop.gremlin.process.traversal.Traversal; import org.apache.tinkerpop.gremlin.process.traversal.Traverser.Admin; @@ -27,10 +28,8 @@ import org.apache.tinkerpop.gremlin.process.traversal.util.FastNoSuchElementException; import org.apache.tinkerpop.gremlin.structure.Element; -import org.apache.hugegraph.util.E; - public final class HugeCountStep - extends AbstractStep { + extends AbstractStep { private static final long serialVersionUID = -679873894532085972L; @@ -44,6 +43,7 @@ public HugeCountStep(final Traversal.Admin traversal, this.originGraphStep = originGraphStep; } + @Override public boolean equals(Object obj) { if (!(obj instanceof HugeCountStep)) { return false; @@ -69,7 +69,7 @@ protected Admin processNextStart() throws NoSuchElementException { throw FastNoSuchElementException.instance(); } this.done = true; - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) Step step = (Step) this; return this.getTraversal().getTraverserGenerator() .generate(this.originGraphStep.count(), step, 1L); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStepStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStepStrategy.java index 4bb3a33c88..ef380d36b2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStepStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeCountStepStrategy.java @@ -40,8 +40,8 @@ import org.apache.tinkerpop.gremlin.structure.Element; public final class HugeCountStepStrategy - extends AbstractTraversalStrategy - implements ProviderOptimizationStrategy { + extends AbstractTraversalStrategy + implements ProviderOptimizationStrategy { private static final long serialVersionUID = -3910433925919057771L; @@ -56,13 +56,13 @@ private HugeCountStepStrategy() { } @Override - @SuppressWarnings({ "rawtypes", "unchecked" }) + 
@SuppressWarnings({"rawtypes", "unchecked"}) public void apply(Traversal.Admin traversal) { TraversalUtil.convAllHasSteps(traversal); // Extract CountGlobalStep List steps = TraversalHelper.getStepsOfClass( - CountGlobalStep.class, traversal); + CountGlobalStep.class, traversal); if (steps.isEmpty()) { return; } @@ -78,12 +78,12 @@ public void apply(Traversal.Admin traversal) { step instanceof IdentityStep || step instanceof NoOpBarrierStep || step instanceof CollectingBarrierStep) || - (step instanceof TraversalParent && - TraversalHelper.anyStepRecursively(s -> { - return s instanceof SideEffectStep || - s instanceof AggregateGlobalStep || - s instanceof AggregateLocalStep; - }, (TraversalParent) step))) { + (step instanceof TraversalParent && + TraversalHelper.anyStepRecursively(s -> { + return s instanceof SideEffectStep || + s instanceof AggregateGlobalStep || + s instanceof AggregateLocalStep; + }, (TraversalParent) step))) { return; } originSteps.add(step); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStep.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStep.java index 21e624e866..bdfb9e0b66 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStep.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStep.java @@ -28,6 +28,7 @@ import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.util.Log; import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep; import org.apache.tinkerpop.gremlin.process.traversal.step.util.HasContainer; import org.apache.tinkerpop.gremlin.structure.Element; @@ -35,10 +36,8 @@ import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; import org.slf4j.Logger; -import org.apache.hugegraph.util.Log; - public final class HugeGraphStep - extends GraphStep implements QueryHolder { + extends GraphStep implements QueryHolder { private static final long serialVersionUID = -679873894532085972L; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStepStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStepStrategy.java index af1511e152..cc3ef3f5ff 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStepStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeGraphStepStrategy.java @@ -28,8 +28,8 @@ import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper; public final class HugeGraphStepStrategy - extends AbstractTraversalStrategy - implements ProviderOptimizationStrategy { + extends AbstractTraversalStrategy + implements ProviderOptimizationStrategy { private static final long serialVersionUID = -2952498905649139719L; @@ -44,13 +44,13 @@ private HugeGraphStepStrategy() { } @Override - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) public void apply(Traversal.Admin traversal) { TraversalUtil.convAllHasSteps(traversal); // Extract conditions in GraphStep List steps = TraversalHelper.getStepsOfClass( - GraphStep.class, traversal); + GraphStep.class, traversal); for (GraphStep originStep : steps) { 
TraversalUtil.trySetGraph(originStep, TraversalUtil.tryGetGraph(steps.get(0))); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugePrimaryKeyStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugePrimaryKeyStrategy.java index 95dc187b5a..e6fa880837 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugePrimaryKeyStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugePrimaryKeyStrategy.java @@ -33,8 +33,8 @@ import org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality; public class HugePrimaryKeyStrategy - extends AbstractTraversalStrategy - implements ProviderOptimizationStrategy { + extends AbstractTraversalStrategy + implements ProviderOptimizationStrategy { private static final long serialVersionUID = 6307847098226016416L; private static final HugePrimaryKeyStrategy INSTANCE = new HugePrimaryKeyStrategy(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java index 66d40e9b29..0ccb48a54a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java @@ -36,7 +36,7 @@ /** * ScriptTraversal encapsulates a {@link ScriptEngine} and a script which is * compiled into a {@link Traversal} at {@link Admin#applyStrategies()}. - * + *
<p>
* This is useful for serializing traversals as the compilation can happen on * the remote end where the traversal will ultimately be processed. * @@ -101,7 +101,7 @@ public void applyStrategies() throws IllegalStateException { Object result = engine.eval(this.script, bindings); if (result instanceof Admin) { - @SuppressWarnings({ "unchecked"}) + @SuppressWarnings({"unchecked"}) Admin traversal = (Admin) result; traversal.getSideEffects().mergeInto(this.sideEffects); traversal.getSteps().forEach(this::addStep); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStep.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStep.java index 1132bc83fa..1a3fc58bea 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStep.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStep.java @@ -30,6 +30,7 @@ import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.Log; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; import org.apache.tinkerpop.gremlin.process.traversal.step.map.VertexStep; import org.apache.tinkerpop.gremlin.process.traversal.step.util.HasContainer; @@ -39,10 +40,8 @@ import org.apache.tinkerpop.gremlin.structure.util.StringFactory; import org.slf4j.Logger; -import org.apache.hugegraph.util.Log; - public class HugeVertexStep - extends VertexStep implements QueryHolder { + extends VertexStep implements QueryHolder { private static final long serialVersionUID = -7850636388424382454L; @@ -124,7 +123,7 @@ protected Iterator queryEdges(Query query) { } protected ConditionQuery constructEdgesQuery( - Traverser.Admin traverser) { + Traverser.Admin traverser) { HugeGraph graph = TraversalUtil.getGraph(this); // Query for edge with conditions(else conditions for vertex) @@ -140,7 +139,7 @@ protected ConditionQuery constructEdgesQuery( vertex, direction, edgeLabels, this.hasContainers); ConditionQuery query = GraphTransaction.constructEdgesQuery( - vertex, direction, edgeLabels); + vertex, direction, edgeLabels); // Query by sort-keys if (withEdgeCond && edgeLabels.length == 1) { TraversalUtil.fillConditionQuery(query, this.hasContainers, graph); @@ -173,8 +172,8 @@ protected ConditionQuery constructEdgesQuery( */ if (withEdgeCond || withVertexCond) { org.apache.hugegraph.util.E.checkArgument(!this.queryInfo().paging(), - "Can't query by paging " + - "and filtering"); + "Can't query by paging " + + "and filtering"); this.queryInfo().limit(Query.NO_LIMIT); } @@ -198,11 +197,11 @@ public String toString() { } return StringFactory.stepString( - this, - getDirection(), - Arrays.asList(getEdgeLabels()), - getReturnClass().getSimpleName(), - this.hasContainers); + this, + getDirection(), + Arrays.asList(getEdgeLabels()), + getReturnClass().getSimpleName(), + this.hasContainers); } @Override @@ -229,6 +228,7 @@ public Iterator lastTimeResults() { return this.iterator; } + @Override public boolean equals(Object obj) { if (!(obj instanceof HugeVertexStep)) { return false; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepByBatch.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepByBatch.java index a1bc02ca02..9517847331 
100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepByBatch.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepByBatch.java @@ -23,6 +23,7 @@ import org.apache.hugegraph.backend.query.BatchConditionQuery; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.iterator.BatchMapperIterator; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.HugeKeys; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; @@ -33,10 +34,8 @@ import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; -import org.apache.hugegraph.iterator.BatchMapperIterator; - public class HugeVertexStepByBatch - extends HugeVertexStep { + extends HugeVertexStep { private static final long serialVersionUID = -3609787815053052222L; @@ -57,7 +56,7 @@ protected Traverser.Admin processNextStart() { if (this.batchIterator == null) { int batchSize = (int) Query.QUERY_BATCH; this.batchIterator = new BatchMapperIterator<>( - batchSize, this.starts, this::flatMap); + batchSize, this.starts, this::flatMap); } if (this.batchIterator.hasNext()) { @@ -110,7 +109,7 @@ private Iterator flatMap(List> traversers) { } private Iterator vertices( - List> traversers) { + List> traversers) { assert traversers.size() > 0; Iterator edges = this.edges(traversers); @@ -121,7 +120,7 @@ private Iterator edges(List> traversers) { assert traversers.size() > 0; BatchConditionQuery batchQuery = new BatchConditionQuery( - HugeType.EDGE, traversers.size()); + HugeType.EDGE, traversers.size()); for (Traverser.Admin traverser : traversers) { ConditionQuery query = this.constructEdgesQuery(traverser); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepStrategy.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepStrategy.java index a01b1ed238..a7dd687348 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepStrategy.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeVertexStepStrategy.java @@ -31,8 +31,8 @@ import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper; public final class HugeVertexStepStrategy - extends AbstractTraversalStrategy - implements ProviderOptimizationStrategy { + extends AbstractTraversalStrategy + implements ProviderOptimizationStrategy { private static final long serialVersionUID = 491355700217483162L; @@ -47,12 +47,12 @@ private HugeVertexStepStrategy() { } @Override - @SuppressWarnings({ "rawtypes", "unchecked" }) + @SuppressWarnings({"rawtypes", "unchecked"}) public void apply(final Traversal.Admin traversal) { TraversalUtil.convAllHasSteps(traversal); List steps = TraversalHelper.getStepsOfClass( - VertexStep.class, traversal); + VertexStep.class, traversal); boolean batchOptimize = false; if (!steps.isEmpty()) { @@ -92,12 +92,13 @@ public void apply(final Traversal.Admin traversal) { /** * Does a Traversal contain any Path step + * * @param traversal * @return the traversal or its parents contain at least one Path step */ protected static boolean containsPath(Traversal.Admin traversal) { boolean hasPath = TraversalHelper.getStepsOfClass( - PathStep.class, traversal).size() > 0; + 
PathStep.class, traversal).size() > 0; if (hasPath) { return true; } else if (traversal instanceof EmptyTraversal) { @@ -110,12 +111,13 @@ protected static boolean containsPath(Traversal.Admin traversal) { /** * Does a Traversal contain any Tree step + * * @param traversal * @return the traversal or its parents contain at least one Tree step */ protected static boolean containsTree(Traversal.Admin traversal) { boolean hasTree = TraversalHelper.getStepsOfClass( - TreeStep.class, traversal).size() > 0; + TreeStep.class, traversal).size() > 0; if (hasTree) { return true; } else if (traversal instanceof EmptyTraversal) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/QueryHolder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/QueryHolder.java index 322744ac2e..917f777b95 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/QueryHolder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/QueryHolder.java @@ -21,11 +21,10 @@ import org.apache.hugegraph.backend.query.Aggregate; import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.iterator.Metadatable; import org.apache.tinkerpop.gremlin.process.traversal.Order; import org.apache.tinkerpop.gremlin.process.traversal.step.HasContainerHolder; -import org.apache.hugegraph.iterator.Metadatable; - public interface QueryHolder extends HasContainerHolder, Metadatable { String SYSPROP_PAGE = "~page"; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java index 204c47807a..11a5c0cee4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java @@ -243,7 +243,7 @@ public static void extractRange(Step newStep, long limit = holder.setRange(range.getLowRange(), range.getHighRange()); RangeGlobalStep newRange = new RangeGlobalStep<>( - traversal, 0, limit); + traversal, 0, limit); TraversalHelper.replaceStep(range, newRange, traversal); } } @@ -311,9 +311,9 @@ public static void extractAggregateFunc(Step newStep, } public static ConditionQuery fillConditionQuery( - ConditionQuery query, - List hasContainers, - HugeGraph graph) { + ConditionQuery query, + List hasContainers, + HugeGraph graph) { HugeType resultType = query.resultType(); for (HasContainer has : hasContainers) { @@ -550,7 +550,7 @@ public static Condition convContains2Relation(HugeGraph graph, // Convert contains-key or contains-value BiPredicate bp = has.getPredicate().getBiPredicate(); E.checkArgument(bp == Compare.eq, "CONTAINS query with relation " + - "'%s' is not supported", bp); + "'%s' is not supported", bp); HugeKeys key = token2HugeKey(has.getKey()); E.checkNotNull(key, "token key"); @@ -602,8 +602,8 @@ public static boolean keyForContainsValue(String key) { @SuppressWarnings("unchecked") public static Iterator filterResult( - List hasContainers, - Iterator iterator) { + List hasContainers, + Iterator iterator) { if (hasContainers.isEmpty()) { return (Iterator) iterator; } @@ -626,8 +626,8 @@ public static void convAllHasSteps(Traversal.Admin traversal) { // Extract all has steps in traversal @SuppressWarnings("rawtypes") List steps = - 
TraversalHelper.getStepsOfAssignableClassRecursively( - HasStep.class, traversal); + TraversalHelper.getStepsOfAssignableClassRecursively( + HasStep.class, traversal); if (steps.isEmpty()) { return; @@ -644,8 +644,8 @@ public static void convAllHasSteps(Traversal.Admin traversal) { } Optional parentGraph = ((Traversal) traversal.getParent()) - .asAdmin() - .getGraph(); + .asAdmin() + .getGraph(); if (parentGraph.filter(g -> !(g instanceof EmptyGraph)).isPresent()) { traversal.setGraph(parentGraph.get()); } @@ -980,7 +980,7 @@ private static Number predicateNumber(String value) { } throw new HugeException( - "Invalid value '%s', expect a number", e, value); + "Invalid value '%s', expect a number", e, value); } } @@ -1005,7 +1005,7 @@ private static Number[] predicateNumbers(String value, int count) { continue; } throw new HugeException( - "Invalid value '%s', expect a list of number", value); + "Invalid value '%s', expect a list of number", value); } return values.toArray(new Number[0]); } @@ -1016,7 +1016,7 @@ private static V predicateArg(String value) { return (V) JsonUtil.fromJson(value, Object.class); } catch (Exception e) { throw new HugeException( - "Invalid value '%s', expect a single value", e, value); + "Invalid value '%s', expect a single value", e, value); } } @@ -1026,7 +1026,7 @@ private static List predicateArgs(String value) { return JsonUtil.fromJson("[" + value + "]", List.class); } catch (Exception e) { throw new HugeException( - "Invalid value '%s', expect a list", e, value); + "Invalid value '%s', expect a list", e, value); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/Nameable.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/Nameable.java index f14c19dd25..cf8e71e6d7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/Nameable.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/Nameable.java @@ -20,6 +20,7 @@ * @author Matthias Broecheler (me@matthiasb.com) */ public interface Nameable { + /** * Returns the unique name of this entity. 
* diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/CollectionType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/CollectionType.java index 52d7a3ba1b..e8a4cd3547 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/CollectionType.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/CollectionType.java @@ -60,7 +60,7 @@ public static CollectionType fromCode(byte code) { return FU; default: throw new AssertionError( - "Unsupported collection code: " + code); + "Unsupported collection code: " + code); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/DataType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/DataType.java index 69ebdbb643..2bfa93e7d7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/DataType.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/DataType.java @@ -22,9 +22,9 @@ import java.util.List; import java.util.UUID; -import org.apache.hugegraph.util.Blob; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.backend.serializer.BytesBuffer; +import org.apache.hugegraph.util.Blob; import org.apache.hugegraph.util.Bytes; import org.apache.hugegraph.util.DateUtil; import org.apache.hugegraph.util.JsonUtil; @@ -132,13 +132,13 @@ public Number valueToNumber(V value) { break; default: throw new AssertionError(String.format( - "Number type only contains Byte, Integer, " + - "Long, Float, Double, but got %s", this.clazz())); + "Number type only contains Byte, Integer, " + + "Long, Float, Double, but got %s", this.clazz())); } } catch (NumberFormatException e) { throw new IllegalArgumentException(String.format( - "Can't read '%s' as %s: %s", - value, this.name, e.getMessage())); + "Can't read '%s' as %s: %s", + value, this.name, e.getMessage())); } return number; } @@ -199,7 +199,7 @@ public Blob valueToBlob(V value) { bytes[i] = ((Number) v).byteValue(); } else { throw new IllegalArgumentException(String.format( - "expect byte or int value, but got '%s'", v)); + "expect byte or int value, but got '%s'", v)); } } return Blob.wrap(bytes); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Directions.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Directions.java index b2e18373b5..1309cd89df 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Directions.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Directions.java @@ -60,7 +60,7 @@ public HugeType type() { return HugeType.EDGE_IN; default: throw new IllegalArgumentException(String.format( - "Can't convert direction '%s' to HugeType", this)); + "Can't convert direction '%s' to HugeType", this)); } } @@ -82,7 +82,7 @@ public Direction direction() { return Direction.BOTH; default: throw new AssertionError(String.format( - "Unrecognized direction: '%s'", this)); + "Unrecognized direction: '%s'", this)); } } @@ -96,7 +96,7 @@ public static Directions convert(Direction direction) { return BOTH; default: throw new AssertionError(String.format( - "Unrecognized direction: '%s'", direction)); + "Unrecognized direction: '%s'", direction)); } } @@ -108,7 +108,7 @@ public static Directions convert(HugeType edgeType) { return IN; default: throw new 
IllegalArgumentException(String.format( - "Can't convert type '%s' to Direction", edgeType)); + "Can't convert type '%s' to Direction", edgeType)); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/IndexType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/IndexType.java index 4019a34787..019ac98df9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/IndexType.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/IndexType.java @@ -82,7 +82,7 @@ public HugeType type() { return HugeType.UNIQUE_INDEX; default: throw new AssertionError(String.format( - "Unknown index type '%s'", this)); + "Unknown index type '%s'", this)); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/SerialEnum.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/SerialEnum.java index 8a115bf05b..471c0c2d70 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/SerialEnum.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/SerialEnum.java @@ -21,6 +21,7 @@ import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; + import com.google.common.collect.HashBasedTable; import com.google.common.collect.Table; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/CompressUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/CompressUtil.java index d8ff9e2998..0d41a70959 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/CompressUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/CompressUtil.java @@ -45,7 +45,6 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.NullOutputStream; - import org.apache.hugegraph.backend.store.raft.RaftContext; import net.jpountz.lz4.LZ4BlockInputStream; @@ -76,12 +75,12 @@ public static void compressTar(String inputDir, String outputFile, } private static void tarDir(Path source, TarArchiveOutputStream tos) - throws IOException { + throws IOException { Files.walkFileTree(source, new SimpleFileVisitor() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) - throws IOException { + throws IOException { String entryName = buildTarEntryName(source, dir); if (!entryName.isEmpty()) { TarArchiveEntry entry = new TarArchiveEntry(dir.toFile(), @@ -95,7 +94,7 @@ public FileVisitResult preVisitDirectory(Path dir, @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attributes) - throws IOException { + throws IOException { // Only copy files, no symbolic links if (attributes.isSymbolicLink()) { return FileVisitResult.CONTINUE; @@ -127,7 +126,7 @@ public static void decompressTar(String sourceFile, String outputDir, Path target = Paths.get(outputDir); if (Files.notExists(source)) { throw new IOException(String.format( - "The source file %s doesn't exists", source)); + "The source file %s doesn't exists", source)); } LZ4Factory factory = LZ4Factory.fastestInstance(); LZ4FastDecompressor decompressor = factory.fastDecompressor(); @@ -160,7 +159,7 @@ public static void decompressTar(String sourceFile, String outputDir, } private static Path zipSlipProtect(ArchiveEntry entry, Path 
targetDir) - throws IOException { + throws IOException { Path targetDirResolved = targetDir.resolve(entry.getName()); /* * Make sure normalized file still has targetDir as its prefix, @@ -183,7 +182,7 @@ public static void compressZip(String inputDir, String outputFile, public static void compressZip(String rootDir, String sourceDir, String outputFile, Checksum checksum) - throws IOException { + throws IOException { try (FileOutputStream fos = new FileOutputStream(outputFile); CheckedOutputStream cos = new CheckedOutputStream(fos, checksum); BufferedOutputStream bos = new BufferedOutputStream(cos); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ConfigUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ConfigUtil.java index b8cb06e243..15d3f63c08 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ConfigUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ConfigUtil.java @@ -36,11 +36,10 @@ import org.apache.commons.configuration2.tree.NodeModel; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeFactory; import org.apache.hugegraph.config.HugeConfig; +import org.slf4j.Logger; public final class ConfigUtil { @@ -55,13 +54,13 @@ public static void checkGremlinConfig(String conf) { try { FileBasedConfigurationBuilder builder = - new FileBasedConfigurationBuilder(YAMLConfiguration.class) - .configure(params.fileBased().setFileName(conf)); + new FileBasedConfigurationBuilder(YAMLConfiguration.class) + .configure(params.fileBased().setFileName(conf)); YAMLConfiguration config = (YAMLConfiguration) builder.getConfiguration(); List> nodes = - config.childConfigurationsAt( - NODE_GRAPHS); + config.childConfigurationsAt( + NODE_GRAPHS); if (nodes == null || nodes.isEmpty()) { return; } @@ -74,9 +73,9 @@ public static void checkGremlinConfig(String conf) { for (HierarchicalConfiguration node : nodes) { NodeModel nodeModel = node.getNodeModel(); E.checkArgument(nodeModel != null && - (nodeHandler = nodeModel.getNodeHandler()) != null && - (root = nodeHandler.getRootNode()) != null, - "Node '%s' must contain root", node); + (nodeHandler = nodeModel.getNodeHandler()) != null && + (root = nodeHandler.getRootNode()) != null, + "Node '%s' must contain root", node); } } catch (ConfigurationException e) { throw new HugeException("Failed to load yaml config file '%s'", diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java index 00c6655a39..daa54ee958 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java @@ -254,7 +254,7 @@ public static void executeOncePerThread(ExecutorService executor, int totalThreads, Runnable callback, long invokeTimeout) - throws InterruptedException { + throws InterruptedException { // Ensure callback execute at least once for every thread final Map threadsTimes = new ConcurrentHashMap<>(); final List> tasks = new ArrayList<>(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedTimerWindowRateLimiter.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedTimerWindowRateLimiter.java index 5fff4b4e8b..0163656d5b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedTimerWindowRateLimiter.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedTimerWindowRateLimiter.java @@ -24,7 +24,7 @@ /** * This class is used for fixed window to limit request per second * The different with stopwatch is to use timer for reducing count times - * + *
* TODO: Move to common module */ public class FixedTimerWindowRateLimiter implements RateLimiter { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedWatchWindowRateLimiter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedWatchWindowRateLimiter.java index 8d93abd2da..620571b621 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedWatchWindowRateLimiter.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/FixedWatchWindowRateLimiter.java @@ -25,10 +25,10 @@ /** * This class is used for fixed watch-window to rate limit request * Now just simplify for performance, don't need lock stop watch - * + *
* Note: This class is not thread safe * TODO: Move to common module - * */ + */ public class FixedWatchWindowRateLimiter implements RateLimiter { private final LongAdder count; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/GZipUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/GZipUtil.java index 8ffef5d17b..962d4a33a1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/GZipUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/GZipUtil.java @@ -22,7 +22,6 @@ import java.util.zip.Inflater; import org.apache.commons.codec.digest.DigestUtils; - import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.serializer.BytesBuffer; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/JsonUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/JsonUtil.java index aa3b491c24..7e634c7781 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/JsonUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/JsonUtil.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.util.Date; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.io.HugeGraphSONModule; import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator; import org.apache.tinkerpop.shaded.jackson.core.JsonProcessingException; import org.apache.tinkerpop.shaded.jackson.core.type.TypeReference; @@ -30,8 +32,6 @@ import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.io.HugeGraphSONModule; import com.google.common.collect.ImmutableSet; public final class JsonUtil { @@ -91,9 +91,10 @@ public static T fromJson(String json, TypeReference typeRef) { /** * Number collection will be parsed to Double Collection via fromJson, * this method used to cast element in collection to original number type - * @param object original number - * @param clazz target type - * @return target number + * + * @param object original number + * @param clazz target type + * @return target number */ public static Object castNumber(Object object, Class clazz) { if (object instanceof Number) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/KryoUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/KryoUtil.java index 87df7c688d..ce789c6d8d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/KryoUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/KryoUtil.java @@ -74,7 +74,7 @@ public static byte[] toKryoWithType(Object value) { @SuppressWarnings("unchecked") public static T fromKryoWithType(byte[] value) { - E.checkState(value != null, "Kryo value can't be null for object"); + E.checkState(value != null, "Kryo value can't be null for object"); return (T) kryo().readClassAndObject(new Input(value)); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/LockUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/LockUtil.java index 842b590f16..fd4e6814d6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/LockUtil.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/LockUtil.java @@ -29,14 +29,14 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; -import org.apache.hugegraph.backend.id.Id; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.concurrent.KeyLock; import org.apache.hugegraph.concurrent.LockManager; import org.apache.hugegraph.concurrent.RowLock; import org.apache.hugegraph.type.HugeType; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -109,8 +109,8 @@ private static Lock lockRead(String group, String lock) { lock, group); if (!readLock.tryLock()) { throw new HugeException( - "Lock [%s:%s] is locked by other operation", - group, lock); + "Lock [%s:%s] is locked by other operation", + group, lock); } LOG.debug("Got the read lock '{}' of LockGroup '{}'", lock, group); return readLock; @@ -125,8 +125,8 @@ private static Lock lockWrite(String group, String lock, long time) { try { if (!writeLock.tryLock(time, TimeUnit.SECONDS)) { throw new HugeException( - "Lock [%s:%s] is locked by other operation", - group, lock); + "Lock [%s:%s] is locked by other operation", + group, lock); } break; } catch (InterruptedException ignore) { @@ -197,8 +197,8 @@ public static List lock(String... locks) { break; default: throw new IllegalArgumentException(String.format( - "Invalid args '%s' at position '%s', " + - "expect 'write' or 'read'", locks[i], i)); + "Invalid args '%s' at position '%s', " + + "expect 'write' or 'read'", locks[i], i)); } } return lockList; @@ -216,7 +216,7 @@ public static String hugeType2Group(HugeType type) { return INDEX_LABEL_ADD_UPDATE; default: throw new AssertionError(String.format( - "Invalid HugeType '%s'", type)); + "Invalid HugeType '%s'", type)); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ParameterUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ParameterUtil.java index 4eb81a8b15..3dbbff6b9d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ParameterUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/ParameterUtil.java @@ -17,8 +17,6 @@ package org.apache.hugegraph.util; -import org.apache.hugegraph.util.E; - import java.util.Map; public class ParameterUtil { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/RateLimiter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/RateLimiter.java index 990a03a7ee..e5a9d47643 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/RateLimiter.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/RateLimiter.java @@ -34,6 +34,9 @@ public interface RateLimiter { * * @param ratePerSecond the rate of the returned RateLimiter, measured in * how many permits become available per second + *
+ * + *
* * TODO: refactor it to make method unchangeable */ diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Reflection.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Reflection.java index 456ec9a223..150fb2949d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Reflection.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Reflection.java @@ -17,14 +17,14 @@ package org.apache.hugegraph.util; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.exception.NotSupportException; -import org.slf4j.Logger; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.exception.NotSupportException; +import org.slf4j.Logger; + public class Reflection { private static final Logger LOG = Log.logger(Reflection.class); @@ -34,9 +34,9 @@ public class Reflection { private static final Method REGISTER_METHODS_TO_FILTER_MOTHOD; public static final String JDK_INTERNAL_REFLECT_REFLECTION = - "jdk.internal.reflect.Reflection"; + "jdk.internal.reflect.Reflection"; public static final String SUN_REFLECT_REFLECTION = - "sun.reflect.Reflection"; + "sun.reflect.Reflection"; static { Method registerFieldsToFilterMethodTemp = null; @@ -44,7 +44,7 @@ public class Reflection { Class reflectionClazzTemp = null; try { reflectionClazzTemp = Class.forName( - JDK_INTERNAL_REFLECT_REFLECTION); + JDK_INTERNAL_REFLECT_REFLECTION); } catch (ClassNotFoundException e) { try { reflectionClazzTemp = Class.forName(SUN_REFLECT_REFLECTION); @@ -80,7 +80,7 @@ public static void registerFieldsToFilter(Class containingClass, String... fieldNames) { if (REGISTER_FILEDS_TO_FILTER_METHOD == null) { throw new NotSupportException( - "Reflection.registerFieldsToFilter()"); + "Reflection.registerFieldsToFilter()"); } try { @@ -89,8 +89,8 @@ public static void registerFieldsToFilter(Class containingClass, containingClass, fieldNames); } catch (IllegalAccessException | InvocationTargetException e) { throw new HugeException( - "Failed to register class '%s' fields to filter: %s", - containingClass, Arrays.toString(fieldNames)); + "Failed to register class '%s' fields to filter: %s", + containingClass, Arrays.toString(fieldNames)); } } @@ -98,7 +98,7 @@ public static void registerMethodsToFilter(Class containingClass, String... 
methodNames) { if (REGISTER_METHODS_TO_FILTER_MOTHOD == null) { throw new NotSupportException( - "Reflection.registerMethodsToFilterMethod()"); + "Reflection.registerMethodsToFilterMethod()"); } try { @@ -107,8 +107,8 @@ public static void registerMethodsToFilter(Class containingClass, containingClass, methodNames); } catch (IllegalAccessException | InvocationTargetException e) { throw new HugeException( - "Failed to register class '%s' methods to filter: %s", - containingClass, Arrays.toString(methodNames)); + "Failed to register class '%s' methods to filter: %s", + containingClass, Arrays.toString(methodNames)); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java index 16bf3207a3..c8d831c9cc 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java @@ -14,7 +14,6 @@ package org.apache.hugegraph.util; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/CollectionFactory.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/CollectionFactory.java index d600145906..eba9d4fa6e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/CollectionFactory.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/CollectionFactory.java @@ -26,6 +26,8 @@ import java.util.Set; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.util.E; import org.eclipse.collections.api.map.primitive.IntObjectMap; import org.eclipse.collections.api.map.primitive.MutableIntObjectMap; import org.eclipse.collections.impl.list.mutable.FastList; @@ -33,9 +35,6 @@ import org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap; import org.eclipse.collections.impl.set.mutable.UnifiedSet; -import org.apache.hugegraph.type.define.CollectionType; -import org.apache.hugegraph.util.E; - import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap; import it.unimi.dsi.fastutil.objects.ObjectArrayList; import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet; @@ -74,7 +73,7 @@ public static List newList(CollectionType type) { return new ObjectArrayList<>(); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -89,7 +88,7 @@ public static List newList(CollectionType type, return new ObjectArrayList<>(initialCapacity); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -104,7 +103,7 @@ public static List newList(CollectionType type, return new ObjectArrayList<>(collection); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -130,7 +129,7 @@ public static Set newSet(CollectionType type) { return new ObjectOpenHashSet<>(); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -145,7 +144,7 @@ public static Set newSet(CollectionType type, return new 
ObjectOpenHashSet<>(initialCapacity); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -160,7 +159,7 @@ public static Set newSet(CollectionType type, return new ObjectOpenHashSet<>(collection); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -190,7 +189,7 @@ public static Map newMap(CollectionType type) { return new Object2ObjectOpenHashMap<>(); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -205,7 +204,7 @@ public static Map newMap(CollectionType type, return new Object2ObjectOpenHashMap<>(initialCapacity); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -220,7 +219,7 @@ public static Map newMap(CollectionType type, return new Object2ObjectOpenHashMap<>(map); default: throw new AssertionError( - "Unsupported collection type: " + type); + "Unsupported collection type: " + type); } } @@ -229,18 +228,18 @@ public static MutableIntObjectMap newIntObjectMap() { } public static MutableIntObjectMap newIntObjectMap( - int initialCapacity) { + int initialCapacity) { return new IntObjectHashMap<>(initialCapacity); } public static MutableIntObjectMap newIntObjectMap( - IntObjectMap map) { + IntObjectMap map) { return new IntObjectHashMap<>(map); } @SuppressWarnings("unchecked") public static MutableIntObjectMap newIntObjectMap( - Object... objects) { + Object... objects) { IntObjectHashMap map = IntObjectHashMap.newMap(); E.checkArgument(objects.length % 2 == 0, "Must provide even arguments for " + diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IdSet.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IdSet.java index d3d06c223c..57f3b12021 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IdSet.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IdSet.java @@ -23,11 +23,10 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.eclipse.collections.api.iterator.MutableLongIterator; -import org.eclipse.collections.impl.set.mutable.primitive.LongHashSet; - import org.apache.hugegraph.iterator.ExtendableIterator; import org.apache.hugegraph.type.define.CollectionType; +import org.eclipse.collections.api.iterator.MutableLongIterator; +import org.eclipse.collections.impl.set.mutable.primitive.LongHashSet; public class IdSet extends AbstractSet { @@ -65,8 +64,8 @@ public boolean contains(Object object) { @Override public Iterator iterator() { return new ExtendableIterator<>( - this.nonNumberIds.iterator(), - new EcLongIdIterator(this.numberIds.longIterator())); + this.nonNumberIds.iterator(), + new EcLongIdIterator(this.numberIds.longIterator())); } @Override diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/Int2IntsMap.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/Int2IntsMap.java index 654f7bbf50..3786119bb1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/Int2IntsMap.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/Int2IntsMap.java @@ -177,7 +177,7 @@ public String toString() { int 
capacity = (this.size() + 1) * 64; StringBuilder sb = new StringBuilder(capacity); sb.append("{"); - for (IntIterator iter = this.keys(); iter.hasNext();) { + for (IntIterator iter = this.keys(); iter.hasNext(); ) { if (sb.length() > 1) { sb.append(", "); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntIterator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntIterator.java index 9dd50b4c49..96d616a964 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntIterator.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntIterator.java @@ -47,7 +47,7 @@ public Integer next() { } static IntIterator wrap( - org.eclipse.collections.api.iterator.IntIterator iter) { + org.eclipse.collections.api.iterator.IntIterator iter) { return new EcIntIterator(iter); } @@ -64,7 +64,7 @@ final class EcIntIterator implements IntIterator { private final org.eclipse.collections.api.iterator.IntIterator iterator; public EcIntIterator(org.eclipse.collections.api.iterator.IntIterator - iterator) { + iterator) { this.iterator = iterator; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMap.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMap.java index 2ce53d99c8..fe8f20b78e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMap.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMap.java @@ -22,12 +22,11 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; -import org.eclipse.collections.api.map.primitive.MutableIntIntMap; -import org.eclipse.collections.impl.map.mutable.primitive.IntIntHashMap; - import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.collection.IntIterator.IntIterators; import org.apache.hugegraph.util.collection.IntIterator.MapperInt2IntIterator; +import org.eclipse.collections.api.map.primitive.MutableIntIntMap; +import org.eclipse.collections.impl.map.mutable.primitive.IntIntHashMap; import sun.misc.Unsafe; @@ -70,13 +69,13 @@ final class IntMapBySegments implements IntMap { private static final int DEFAULT_SEGMENTS = (IntSet.CPUS + 8) * 32; private static final Function DEFAULT_CREATOR = - size -> new IntMapByFixedAddr(size); + size -> new IntMapByFixedAddr(size); @SuppressWarnings("static-access") private static final int BASE_OFFSET = UNSAFE.ARRAY_OBJECT_BASE_OFFSET; @SuppressWarnings("static-access") private static final int SHIFT = 31 - Integer.numberOfLeadingZeros( - UNSAFE.ARRAY_OBJECT_INDEX_SCALE); + UNSAFE.ARRAY_OBJECT_INDEX_SCALE); public IntMapBySegments(int capacity) { this(capacity, DEFAULT_SEGMENTS, DEFAULT_CREATOR); @@ -243,7 +242,7 @@ private IntMap segmentAt(int index) { * NOTE: IntMapByFixedAddr is: * - faster 3x than ec IntIntHashMap for single thread; * - faster 8x than ec IntIntHashMap for 4 threads, 4x operations - * with 0.5x cost; + * with 0.5x cost; */ final class IntMapByFixedAddr implements IntMap { @@ -260,7 +259,7 @@ final class IntMapByFixedAddr implements IntMap { private static final int BASE_OFFSET = UNSAFE.ARRAY_INT_BASE_OFFSET; @SuppressWarnings("static-access") private static final int MUL4 = 31 - Integer.numberOfLeadingZeros( - UNSAFE.ARRAY_INT_INDEX_SCALE); + UNSAFE.ARRAY_INT_INDEX_SCALE); public IntMapByFixedAddr(int capacity) { this.capacity = 
capacity; @@ -280,9 +279,9 @@ public IntMapByFixedAddr(int capacity) { this.indexBlockSize = IntSet.segmentSize(capacity, this.indexBlocksNum); this.indexBlockSizeShift = Integer.numberOfTrailingZeros( - this.indexBlockSize); + this.indexBlockSize); this.indexBlocksSet = new IntSet.IntSetByFixedAddr4Unsigned( - this.indexBlocksNum); + this.indexBlocksNum); this.clear(); } @@ -441,7 +440,7 @@ public boolean hasNext() { } } this.indexOfBlock = indexBlocksSet.nextKey( - this.indexOfBlock + 1); + this.indexOfBlock + 1); this.indexInBlock = 0; } assert !this.fetched; @@ -487,7 +486,7 @@ public boolean hasNext() { } } this.indexOfBlock = indexBlocksSet.nextKey( - this.indexOfBlock + 1); + this.indexOfBlock + 1); this.indexInBlock = 0; } return false; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMapByDynamicHash.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMapByDynamicHash.java index 0e761b9113..52aac88da9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMapByDynamicHash.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntMapByDynamicHash.java @@ -49,8 +49,9 @@ public class IntMapByDynamicHash implements IntMap { private static final int NULL_VALUE = Integer.MIN_VALUE; private static final AtomicReferenceFieldUpdater - TABLE_UPDATER = - AtomicReferenceFieldUpdater.newUpdater(IntMapByDynamicHash.class, Entry[].class, "table"); + TABLE_UPDATER = + AtomicReferenceFieldUpdater.newUpdater(IntMapByDynamicHash.class, Entry[].class, + "table"); private volatile Entry[] table; @@ -94,7 +95,7 @@ public IntMapByDynamicHash(int initialCapacity) { // we want 7 extra slots, and 64 bytes for each slot int are 4 bytes, // so 64 bytes are 16 ints. 
this.partitionedSize = - new int[SIZE_BUCKETS * 16]; + new int[SIZE_BUCKETS * 16]; } // The end index is for resizeContainer this.table = new Entry[cap + 1]; @@ -203,7 +204,7 @@ private int slowPut(int key, int value, Entry[] currentTable) { int oldVal = e.getValue(); // Key found, replace the entry Entry newEntry = - new Entry(key, value, this.createReplacementChainForRemoval(o, e)); + new Entry(key, value, this.createReplacementChainForRemoval(o, e)); if (IntMapByDynamicHash.casTableAt(currentTable, index, o, newEntry)) { return oldVal; } @@ -385,8 +386,8 @@ public void clear() { Entry o = (Entry) IntMapByDynamicHash.tableAt(currentArray, i); if (o == RESIZED || o == RESIZING) { resizeContainer = - (ResizeContainer) IntMapByDynamicHash.tableAt(currentArray, - currentArray.length - 1); + (ResizeContainer) IntMapByDynamicHash.tableAt(currentArray, + currentArray.length - 1); } else if (o != null) { Entry e = o; if (IntMapByDynamicHash.casTableAt(currentArray, i, o, null)) { @@ -645,7 +646,8 @@ private void transfer(Entry[] src, ResizeContainer resizeContainer) { */ private Entry[] helpWithResize(Entry[] currentArray) { ResizeContainer resizeContainer = - (ResizeContainer) IntMapByDynamicHash.tableAt(currentArray, currentArray.length - 1); + (ResizeContainer) IntMapByDynamicHash.tableAt(currentArray, + currentArray.length - 1); Entry[] newTable = resizeContainer.nextArray; if (resizeContainer.getQueuePosition() > ResizeContainer.QUEUE_INCREMENT) { resizeContainer.incrementResizer(); @@ -714,8 +716,8 @@ private void unconditionalCopy(Entry[] dest, Entry toCopyEntry) { Entry o = (Entry) IntMapByDynamicHash.tableAt(currentArray, index); if (o == RESIZED || o == RESIZING) { currentArray = - ((ResizeContainer) IntMapByDynamicHash.tableAt(currentArray, - length - 1)).nextArray; + ((ResizeContainer) IntMapByDynamicHash.tableAt(currentArray, + length - 1)).nextArray; } else { Entry newEntry; if (o == null) { @@ -743,8 +745,8 @@ private void unconditionalCopy(Entry[] dest, Entry toCopyEntry) { private static final class ResizeContainer extends Entry { private static final int QUEUE_INCREMENT = - Math.min(1 << 10, - Integer.highestOneBit(IntSet.CPUS) << 4); + Math.min(1 << 10, + Integer.highestOneBit(IntSet.CPUS) << 4); private final AtomicInteger resizers = new AtomicInteger(1); private final Entry[] nextArray; private final AtomicInteger queuePosition; @@ -868,6 +870,7 @@ public String toString() { /* ---------------- Iterator -------------- */ private static final class IteratorState { + private Entry[] currentTable; private int start; private int end; @@ -913,11 +916,12 @@ protected HashIterator() { private void findNext() { while (this.index < this.currentState.end) { Entry o = - (Entry) IntMapByDynamicHash.tableAt(this.currentState.currentTable, this.index); + (Entry) IntMapByDynamicHash.tableAt(this.currentState.currentTable, + this.index); if (o == RESIZED || o == RESIZING) { Entry[] nextArray = - IntMapByDynamicHash.this.helpWithResizeWhileCurrentIndex( - this.currentState.currentTable, this.index); + IntMapByDynamicHash.this.helpWithResizeWhileCurrentIndex( + this.currentState.currentTable, this.index); int endResized = this.index + 1; while (endResized < this.currentState.end) { if (IntMapByDynamicHash.tableAt(this.currentState.currentTable, @@ -931,7 +935,7 @@ private void findNext() { } if (endResized < this.currentState.end) { this.todo.add(new IteratorState( - this.currentState.currentTable, endResized, this.currentState.end)); + this.currentState.currentTable, endResized, 
this.currentState.end)); } int powerTwoLength = this.currentState.currentTable.length - 1; this.todo.add(new IteratorState(nextArray, this.index + powerTwoLength, @@ -974,6 +978,7 @@ final Entry nextEntry() { } private final class ValueIterator extends HashIterator { + @Override public int next() { return this.nextEntry().getValue(); @@ -981,6 +986,7 @@ public int next() { } private final class KeyIterator extends HashIterator { + @Override public int next() { return this.nextEntry().getKey(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntSet.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntSet.java index 3431247c40..ce37bda8ef 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntSet.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/IntSet.java @@ -21,11 +21,10 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; +import org.apache.hugegraph.util.E; import org.eclipse.collections.api.collection.primitive.MutableIntCollection; import org.eclipse.collections.impl.set.mutable.primitive.IntHashSet; -import org.apache.hugegraph.util.E; - import io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess; public interface IntSet { @@ -59,13 +58,13 @@ final class IntSetBySegments implements IntSet { private static final int DEFAULT_SEGMENTS = IntSet.CPUS * 100; private static final Function DEFAULT_CREATOR = - size -> new IntSetByFixedAddr4Unsigned(size); + size -> new IntSetByFixedAddr4Unsigned(size); @SuppressWarnings("static-access") private static final int BASE_OFFSET = UNSAFE.ARRAY_OBJECT_BASE_OFFSET; @SuppressWarnings("static-access") private static final int SHIFT = 31 - Integer.numberOfLeadingZeros( - UNSAFE.ARRAY_OBJECT_INDEX_SCALE); + UNSAFE.ARRAY_OBJECT_INDEX_SCALE); public IntSetBySegments(int capacity) { this(capacity, DEFAULT_SEGMENTS, DEFAULT_CREATOR); @@ -195,7 +194,7 @@ private IntSet segmentAt(int index) { * NOTE: IntSetByFixedAddr is: * - faster 3x than ec IntIntHashSet for single thread; * - faster 6x than ec IntIntHashSet for 4 threads, 4x operations - * with 0.67x cost; + * with 0.67x cost; * - faster 20x than ec IntIntHashSet-segment-lock for 4 threads; * - faster 60x than ec IntIntHashSet-global-lock for 4 threads; */ @@ -302,7 +301,7 @@ final class IntSetByFixedAddr4Unsigned implements IntSet { private static final int BASE_OFFSET = UNSAFE.ARRAY_LONG_BASE_OFFSET; @SuppressWarnings("static-access") private static final int MUL8 = 31 - Integer.numberOfLeadingZeros( - UNSAFE.ARRAY_LONG_INDEX_SCALE); + UNSAFE.ARRAY_LONG_INDEX_SCALE); public IntSetByFixedAddr4Unsigned(int numBits) { this.numBits = numBits; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/ObjectIntMappingFactory.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/ObjectIntMappingFactory.java index 56e9683097..832ed680bc 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/ObjectIntMappingFactory.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/collection/ObjectIntMappingFactory.java @@ -17,10 +17,9 @@ package org.apache.hugegraph.util.collection; -import org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.perf.PerfUtil.Watched; +import 
org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap; public class ObjectIntMappingFactory { @@ -29,13 +28,13 @@ public static ObjectIntMapping newObjectIntMapping() { } public static ObjectIntMapping newObjectIntMapping( - boolean concurrent) { + boolean concurrent) { return concurrent ? new ConcurrentObjectIntMapping<>() : - new SingleThreadObjectIntMapping<>(); + new SingleThreadObjectIntMapping<>(); } public static final class SingleThreadObjectIntMapping - implements ObjectIntMapping { + implements ObjectIntMapping { private static final int MAGIC = 1 << 16; private static final int MAX_OFFSET = 10; @@ -103,7 +102,7 @@ public String toString() { } public static final class ConcurrentObjectIntMapping - implements ObjectIntMapping { + implements ObjectIntMapping { private final SingleThreadObjectIntMapping objectIntMapping; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/variables/HugeVariables.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/variables/HugeVariables.java index 9dc8515e33..fa3c733cc5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/variables/HugeVariables.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/variables/HugeVariables.java @@ -38,10 +38,12 @@ import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.SchemaManager; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; import org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.Log; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.Vertex; @@ -49,9 +51,6 @@ import org.apache.tinkerpop.gremlin.structure.util.StringFactory; import org.slf4j.Logger; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.Log; - public class HugeVariables implements Graph.Variables { private static final Logger LOG = Log.logger(HugeVariables.class); @@ -216,7 +215,7 @@ public Optional get(String key) { String type = vertex.value(Hidden.hide(VARIABLE_TYPE)); if (!Arrays.asList(TYPES).contains(Hidden.hide(type))) { throw Graph.Variables.Exceptions - .dataTypeOfVariableValueNotSupported(type); + .dataTypeOfVariableValueNotSupported(type); } // The value of key VARIABLE_TYPE is the name of variable value return Optional.of(vertex.value(Hidden.hide(type))); @@ -262,7 +261,7 @@ public Map asMap() { String type = vertex.value(Hidden.hide(VARIABLE_TYPE)); if (!Arrays.asList(TYPES).contains(Hidden.hide(type))) { throw Graph.Variables.Exceptions - .dataTypeOfVariableValueNotSupported(type); + .dataTypeOfVariableValueNotSupported(type); } Object value = vertex.value(Hidden.hide(type)); variables.put(key, value); @@ -322,7 +321,7 @@ private void setProperty(HugeVertex vertex, String key, Object value) { vertex.property(Hidden.hide(VARIABLE_TYPE), STRING_VALUE + suffix); } else { throw Graph.Variables.Exceptions - .dataTypeOfVariableValueNotSupported(value); + .dataTypeOfVariableValueNotSupported(value); } } @@ -335,7 +334,7 @@ private void createVariableVertex(String key, Object value) { this.setProperty(vertex, key, value); } catch (IllegalArgumentException e) { throw Graph.Variables.Exceptions - .dataTypeOfVariableValueNotSupported(value, e); + 
.dataTypeOfVariableValueNotSupported(value, e); } // PrimaryKey id vertex.assignId(null); @@ -367,7 +366,7 @@ private ConditionQuery createVariableQuery(String name) { query.eq(HugeKeys.LABEL, vl.id()); if (name != null) { PropertyKey pkey = this.params.graph().propertyKey( - Hidden.hide(VARIABLE_KEY)); + Hidden.hide(VARIABLE_KEY)); query.query(Condition.eq(pkey.id(), name)); } query.showHidden(true);