From 9f89afc39a68263aae9b80ddcce59ee26099f68e Mon Sep 17 00:00:00 2001 From: V_Galaxy Date: Wed, 10 Jul 2024 16:35:44 +0800 Subject: [PATCH] feat(server): support new backend Hstore (#2560) subtask of #2483 --- .github/workflows/pd-store-ci.yml | 70 + .../org/apache/hugegraph/api/job/TaskAPI.java | 5 +- .../hugegraph/auth/HugeGraphAuthProxy.java | 36 +- .../store/cassandra/CassandraStore.java | 2 +- .../org/apache/hugegraph/HugeGraphParams.java | 5 +- .../apache/hugegraph/StandardHugeGraph.java | 45 +- .../org/apache/hugegraph/auth/HugeAccess.java | 5 + .../org/apache/hugegraph/auth/HugeBelong.java | 14 + .../org/apache/hugegraph/auth/HugeGroup.java | 14 + .../org/apache/hugegraph/auth/HugeRole.java | 240 +++ .../org/apache/hugegraph/auth/HugeTarget.java | 18 + .../org/apache/hugegraph/auth/HugeUser.java | 14 + .../apache/hugegraph/auth/SchemaDefine.java | 22 + .../hugegraph/backend/cache/CacheManager.java | 4 + .../cache/CachedSchemaTransaction.java | 2 +- .../cache/CachedSchemaTransactionV2.java | 488 +++++++ .../backend/serializer/BinarySerializer.java | 26 +- .../backend/serializer/BytesBuffer.java | 17 +- .../backend/serializer/TextSerializer.java | 1 + .../backend/store/BackendStoreInfo.java | 9 +- .../backend/store/BackendStoreProvider.java | 4 + .../backend/store/memory/InMemoryDBStore.java | 2 +- .../backend/tx/GraphIndexTransaction.java | 10 +- .../backend/tx/GraphTransaction.java | 38 +- .../backend/tx/ISchemaTransaction.java | 111 ++ .../hugegraph/backend/tx/IdCounter.java | 142 ++ .../backend/tx/SchemaTransaction.java | 5 +- .../backend/tx/SchemaTransactionV2.java | 757 ++++++++++ .../job/schema/EdgeLabelRemoveJob.java | 3 +- .../job/schema/IndexLabelRebuildJob.java | 5 +- .../job/schema/IndexLabelRemoveJob.java | 3 +- .../job/schema/OlapPropertyKeyClearJob.java | 5 +- .../job/schema/OlapPropertyKeyCreateJob.java | 3 +- .../job/schema/OlapPropertyKeyRemoveJob.java | 3 +- .../hugegraph/job/schema/SchemaJob.java | 10 +- 
.../job/schema/VertexLabelRemoveJob.java | 3 +- .../StandardClusterRoleStore.java | 7 +- .../apache/hugegraph/meta/EtcdMetaDriver.java | 322 ++++ .../org/apache/hugegraph/meta/MetaDriver.java | 73 + .../apache/hugegraph/meta/MetaManager.java | 1297 +++++++++++++++++ .../apache/hugegraph/meta/PdMetaDriver.java | 212 +++ .../meta/lock/EtcdDistributedLock.java | 167 +++ .../hugegraph/meta/lock/LockResult.java | 61 + .../meta/lock/PdDistributedLock.java | 94 ++ .../meta/managers/AbstractMetaManager.java | 94 ++ .../meta/managers/AuthMetaManager.java | 1035 +++++++++++++ .../meta/managers/ConfigMetaManager.java | 149 ++ .../meta/managers/GraphMetaManager.java | 284 ++++ .../meta/managers/KafkaMetaManager.java | 46 + .../meta/managers/LockMetaManager.java | 27 + .../meta/managers/SchemaMetaManager.java | 517 +++++++ .../managers/SchemaTemplateMetaManager.java | 107 ++ .../meta/managers/ServiceMetaManager.java | 170 +++ .../meta/managers/SpaceMetaManager.java | 202 +++ .../meta/managers/TaskMetaManager.java | 76 + .../apache/hugegraph/schema/EdgeLabel.java | 175 +++ .../apache/hugegraph/schema/IndexLabel.java | 75 + .../apache/hugegraph/schema/PropertyKey.java | 84 ++ .../hugegraph/schema/SchemaElement.java | 32 + .../hugegraph/schema/SchemaManager.java | 6 +- .../apache/hugegraph/schema/VertexLabel.java | 111 ++ .../schema/builder/AbstractBuilder.java | 5 +- .../schema/builder/EdgeLabelBuilder.java | 5 +- .../schema/builder/IndexLabelBuilder.java | 5 +- .../schema/builder/PropertyKeyBuilder.java | 5 +- .../schema/builder/VertexLabelBuilder.java | 5 +- .../apache/hugegraph/space/GraphSpace.java | 512 +++++++ .../hugegraph/space/SchemaTemplate.java | 140 ++ .../org/apache/hugegraph/space/Service.java | 361 +++++ .../apache/hugegraph/structure/HugeEdge.java | 65 + .../apache/hugegraph/structure/HugeIndex.java | 15 +- .../hugegraph/structure/HugeVertex.java | 14 + .../task/DistributedTaskScheduler.java | 652 +++++++++ .../org/apache/hugegraph/task/HugeTask.java | 95 +- 
.../apache/hugegraph/task/HugeTaskResult.java | 122 ++ .../hugegraph/task/ServerInfoManager.java | 13 +- .../hugegraph/task/StandardTaskScheduler.java | 178 +-- .../task/TaskAndResultScheduler.java | 335 +++++ .../task/TaskAndResultTransaction.java | 103 ++ .../apache/hugegraph/task/TaskManager.java | 215 ++- .../apache/hugegraph/task/TaskScheduler.java | 18 +- .../org/apache/hugegraph/task/TaskStatus.java | 4 +- .../hugegraph/task/TaskTransaction.java | 165 +++ .../hugegraph/type/define/EdgeLabelType.java | 70 + hugegraph-server/hugegraph-dist/pom.xml | 5 + .../static/conf/graphs/hugegraph.properties | 13 +- .../src/assembly/travis/install-backend.sh | 3 + .../src/assembly/travis/install-hstore.sh | 23 + .../src/assembly/travis/start-pd.sh | 18 +- .../src/assembly/travis/start-server.sh | 3 +- .../src/assembly/travis/start-store.sh | 26 + .../apache/hugegraph/dist/RegisterUtil.java | 12 + .../src/main/resources/backend.properties | 2 +- .../backend/store/hbase/HbaseStore.java | 2 +- .../backend/store/mysql/MysqlStore.java | 2 +- .../backend/store/rocksdb/RocksDBStore.java | 2 +- .../backend/store/rocksdb/RocksDBTable.java | 23 + hugegraph-server/hugegraph-test/pom.xml | 10 + .../apache/hugegraph/api/MetricsApiTest.java | 3 + .../apache/hugegraph/core/BaseCoreTest.java | 9 + .../apache/hugegraph/core/EdgeCoreTest.java | 5 +- .../hugegraph/core/MultiGraphsTest.java | 14 +- .../apache/hugegraph/core/TaskCoreTest.java | 6 +- .../apache/hugegraph/core/VertexCoreTest.java | 11 +- .../apache/hugegraph/tinkerpop/TestGraph.java | 2 +- hugegraph-store/.gitignore | 3 + .../rocksdb/access/RocksDBOptions.java | 22 +- .../rocksdb/access/RocksDBSession.java | 3 +- 108 files changed, 10558 insertions(+), 330 deletions(-) create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java create mode 100644 
hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/EtcdDistributedLock.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/LockResult.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/PdDistributedLock.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AbstractMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AuthMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ConfigMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/GraphMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/KafkaMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/LockMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaMetaManager.java create mode 100644 
hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/SchemaTemplate.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/Service.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/DistributedTaskScheduler.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTaskResult.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultScheduler.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultTransaction.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskTransaction.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/EdgeLabelType.java create mode 100755 hugegraph-server/hugegraph-dist/src/assembly/travis/install-hstore.sh create mode 100755 hugegraph-server/hugegraph-dist/src/assembly/travis/start-store.sh diff --git a/.github/workflows/pd-store-ci.yml b/.github/workflows/pd-store-ci.yml index df67b242b7..572ea55c45 100644 --- a/.github/workflows/pd-store-ci.yml +++ b/.github/workflows/pd-store-ci.yml @@ -116,3 +116,73 @@ jobs: uses: codecov/codecov-action@v3.0.0 with: file: ${{ env.REPORT_DIR }}/*.xml + + hstore: + # 
TODO: avoid duplicated env setup + runs-on: ubuntu-latest + env: + USE_STAGE: 'true' # Whether to include the stage repository. + TRAVIS_DIR: hugegraph-server/hugegraph-dist/src/assembly/travis + REPORT_DIR: target/site/jacoco + BACKEND: hstore + RELEASE_BRANCH: ${{ startsWith(github.ref_name, 'release-') || startsWith(github.ref_name, 'test-') || startsWith(github.base_ref, 'release-') }} + + steps: + - name: Install JDK 11 + uses: actions/setup-java@v3 + with: + java-version: '11' + distribution: 'zulu' + + - name: Cache Maven packages + uses: actions/cache@v3 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 2 + + - name: use staged maven repo settings + if: ${{ env.USE_STAGE == 'true' }} + run: | + cp $HOME/.m2/settings.xml /tmp/settings.xml + mv -vf .github/configs/settings.xml $HOME/.m2/settings.xml + + - name: Package + run: | + mvn clean package -U -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -ntp + + - name: Prepare env and service + run: | + $TRAVIS_DIR/install-backend.sh $BACKEND + + - name: Run unit test + run: | + $TRAVIS_DIR/run-unit-test.sh $BACKEND + + - name: Run core test + run: | + $TRAVIS_DIR/run-core-test.sh $BACKEND + + - name: Run api test + run: | + $TRAVIS_DIR/run-api-test.sh $BACKEND $REPORT_DIR + + - name: Run raft test + if: ${{ env.BACKEND == 'rocksdb' }} + run: | + $TRAVIS_DIR/run-api-test-for-raft.sh $BACKEND $REPORT_DIR + + - name: Run TinkerPop test + if: ${{ env.RELEASE_BRANCH == 'true' }} + run: | + $TRAVIS_DIR/run-tinkerpop-test.sh $BACKEND tinkerpop + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3.0.0 + with: + file: ${{ env.REPORT_DIR }}/*.xml diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/TaskAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/TaskAPI.java index 42cc7d7f02..d9b90de103 100644 
--- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/TaskAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/TaskAPI.java @@ -136,11 +136,12 @@ public Map get(@Context GraphManager manager, @RedirectFilter.RedirectMasterRole public void delete(@Context GraphManager manager, @PathParam("graph") String graph, - @PathParam("id") long id) { + @PathParam("id") long id, + @DefaultValue("false") @QueryParam("force") boolean force) { LOG.debug("Graph [{}] delete task: {}", graph, id); TaskScheduler scheduler = graph(manager, graph).taskScheduler(); - HugeTask task = scheduler.delete(IdGenerator.of(id)); + HugeTask task = scheduler.delete(IdGenerator.of(id), force); E.checkArgument(task != null, "There is no task with id '%s'", id); } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java index e611d166f4..63d3912793 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java @@ -26,6 +26,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; @@ -71,6 +72,7 @@ import org.apache.hugegraph.structure.HugeFeatures; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.task.HugeTask; +import org.apache.hugegraph.task.ServerInfoManager; import org.apache.hugegraph.task.TaskManager; import org.apache.hugegraph.task.TaskScheduler; import org.apache.hugegraph.task.TaskStatus; @@ -1085,10 +1087,10 @@ public Iterator> tasks(TaskStatus status, } @Override - public HugeTask delete(Id id) { + public HugeTask delete(Id 
id, boolean force) { verifyTaskPermission(HugePermission.DELETE, this.taskScheduler.task(id)); - return this.taskScheduler.delete(id); + return this.taskScheduler.delete(id, force); } @Override @@ -1124,6 +1126,36 @@ public void checkRequirement(String op) { this.taskScheduler.checkRequirement(op); } + @Override + public V call(Callable callable) { + verifyAnyPermission(); + return this.taskScheduler.call(callable); + } + + @Override + public V call(Runnable runnable) { + verifyAnyPermission(); + return this.taskScheduler.call(runnable); + } + + @Override + public ServerInfoManager serverManager() { + verifyAnyPermission(); + return this.taskScheduler.serverManager(); + } + + @Override + public String graphName() { + verifyAnyPermission(); + return this.taskScheduler.graphName(); + } + + @Override + public void taskDone(HugeTask task) { + verifyAnyPermission(); + this.taskScheduler.taskDone(task); + } + private void verifyTaskPermission(HugePermission actionPerm) { verifyPermission(actionPerm, ResourceType.TASK); } diff --git a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java index 85a592945e..d302c65d5c 100644 --- a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java +++ b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java @@ -592,7 +592,7 @@ protected Collection tables() { @Override protected final CassandraTable table(HugeType type) { - return this.table(type.string()); + return this.table(convertTaskOrServerToVertex(type).string()); } protected final CassandraTable table(String name) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java index ec50e004cb..5556fdfa6a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.backend.store.BackendStore; import org.apache.hugegraph.backend.store.ram.RamTable; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.event.EventHub; @@ -47,7 +48,7 @@ public interface HugeGraphParams { GraphReadMode readMode(); - SchemaTransaction schemaTransaction(); + ISchemaTransaction schemaTransaction(); GraphTransaction systemTransaction(); @@ -94,4 +95,6 @@ public interface HugeGraphParams { RamTable ramtable(); void submitEphemeralJob(EphemeralJob job); + + String schedulerType(); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java index 05a44a4a1c..4e9263a488 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java @@ -37,6 +37,7 @@ import org.apache.hugegraph.backend.cache.CacheNotifier.SchemaCacheNotifier; import org.apache.hugegraph.backend.cache.CachedGraphTransaction; import org.apache.hugegraph.backend.cache.CachedSchemaTransaction; +import org.apache.hugegraph.backend.cache.CachedSchemaTransactionV2; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.id.SnowflakeIdGenerator; @@ -52,7 +53,7 @@ import 
org.apache.hugegraph.backend.store.raft.RaftGroupManager; import org.apache.hugegraph.backend.store.ram.RamTable; import org.apache.hugegraph.backend.tx.GraphTransaction; -import org.apache.hugegraph.backend.tx.SchemaTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.config.TypedOption; @@ -69,6 +70,7 @@ import org.apache.hugegraph.masterelection.RoleElectionStateMachine; import org.apache.hugegraph.masterelection.StandardClusterRoleStore; import org.apache.hugegraph.masterelection.StandardRoleElectionStateMachine; +import org.apache.hugegraph.meta.MetaManager; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.rpc.RpcServiceConfig4Client; import org.apache.hugegraph.rpc.RpcServiceConfig4Server; @@ -176,6 +178,8 @@ public class StandardHugeGraph implements HugeGraph { private final RamTable ramtable; + private final String schedulerType; + public StandardHugeGraph(HugeConfig config) { this.params = new StandardHugeGraphParams(); this.configuration = config; @@ -209,6 +213,7 @@ public StandardHugeGraph(HugeConfig config) { this.closed = false; this.mode = GraphMode.NONE; this.readMode = GraphReadMode.OLTP_ONLY; + this.schedulerType = config.get(CoreOptions.SCHEDULER_TYPE); LockUtil.init(this.name); @@ -221,6 +226,13 @@ public StandardHugeGraph(HugeConfig config) { throw new HugeException(message, e); } + if (isHstore()) { + // TODO: parameterize the remaining configurations + MetaManager.instance().connect("hg", MetaManager.MetaDriverType.PD, + "ca", "ca", "ca", + config.get(CoreOptions.PD_PEERS)); + } + try { this.tx = new TinkerPopTransaction(this); boolean supportsPersistence = this.backendStoreFeatures().supportsPersistence(); @@ -457,9 +469,18 @@ private void clearVertexCache() { } } - private SchemaTransaction openSchemaTransaction() throws HugeException { + private boolean isHstore() { + 
return this.storeProvider.isHstore(); + } + + private ISchemaTransaction openSchemaTransaction() throws HugeException { this.checkGraphNotClosed(); try { + if (isHstore()) { + return new CachedSchemaTransactionV2( + MetaManager.instance().metaDriver(), + MetaManager.instance().cluster(), this.params); + } return new CachedSchemaTransaction(this.params, loadSchemaStore()); } catch (BackendException e) { String message = "Failed to open schema transaction"; @@ -504,11 +525,14 @@ private BackendStore loadGraphStore() { } private BackendStore loadSystemStore() { + if (isHstore()) { + return this.storeProvider.loadGraphStore(this.configuration); + } return this.storeProvider.loadSystemStore(this.configuration); } @Watched - private SchemaTransaction schemaTransaction() { + private ISchemaTransaction schemaTransaction() { this.checkGraphNotClosed(); /* * NOTE: each schema operation will be auto committed, @@ -1196,7 +1220,7 @@ public GraphReadMode readMode() { } @Override - public SchemaTransaction schemaTransaction() { + public ISchemaTransaction schemaTransaction() { return StandardHugeGraph.this.schemaTransaction(); } @@ -1316,6 +1340,11 @@ public RamTable ramtable() { public void submitEphemeralJob(EphemeralJob job) { this.ephemeralJobQueue.add(job); } + + @Override + public String schedulerType() { + return StandardHugeGraph.this.schedulerType; + } } private class TinkerPopTransaction extends AbstractThreadLocalTransaction { @@ -1447,7 +1476,7 @@ private void setClosed() { } } - private SchemaTransaction schemaTransaction() { + private ISchemaTransaction schemaTransaction() { return this.getOrNewTransaction().schemaTx; } @@ -1468,7 +1497,7 @@ private Txs getOrNewTransaction() { Txs txs = this.transactions.get(); if (txs == null) { - SchemaTransaction schemaTransaction = null; + ISchemaTransaction schemaTransaction = null; SysTransaction sysTransaction = null; GraphTransaction graphTransaction = null; try { @@ -1511,12 +1540,12 @@ private void destroyTransaction() { 
private static final class Txs { - private final SchemaTransaction schemaTx; + private final ISchemaTransaction schemaTx; private final SysTransaction systemTx; private final GraphTransaction graphTx; private long openedTime; - public Txs(SchemaTransaction schemaTx, SysTransaction systemTx, + public Txs(ISchemaTransaction schemaTx, SysTransaction systemTx, GraphTransaction graphTx) { assert schemaTx != null && systemTx != null && graphTx != null; this.schemaTx = schemaTx; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java index 8bec94341b..85ac82bc15 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java @@ -228,4 +228,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeAccess fromMap(Map map) { + HugeAccess access = new HugeAccess(null, null, null); + return fromMap(map, access); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java index f39a7b6ea2..ce23f88013 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java @@ -32,10 +32,15 @@ public class HugeBelong extends Relationship { + public static final String UG = "ug"; + public static final String UR = "ur"; + public static final String GR = "gr"; + public static final String ALL = "*"; private static final long serialVersionUID = -7242751631755533423L; private final Id user; private final Id group; + private String link; private String description; public HugeBelong(Id user, Id group) { @@ -74,6 +79,10 @@ 
public Id target() { return this.group; } + public String link() { + return this.link; + } + public String description() { return this.description; } @@ -193,4 +202,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeBelong fromMap(Map map) { + HugeBelong belong = new HugeBelong(null, null); + return fromMap(map, belong); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java index dd6a3d286e..ee2470076e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java @@ -36,6 +36,7 @@ public class HugeGroup extends Entity { private static final long serialVersionUID = 2330399818352242686L; private String name; + private String nickname; private String description; public HugeGroup(String name) { @@ -67,6 +68,14 @@ public String name() { return this.name; } + public String nickname() { + return this.nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + public String description() { return this.description; } @@ -194,4 +203,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeGroup fromMap(Map map) { + HugeGroup group = new HugeGroup(""); + return fromMap(map, group); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java new file mode 100644 index 0000000000..ee5955bbaf --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java @@ -0,0 +1,240 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.auth; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; +import org.apache.tinkerpop.gremlin.structure.T; + +public class HugeRole extends Entity { + + private static final long serialVersionUID = 2330399818352242686L; + + private String name; + private String nickname; + private String graphSpace; + private String description; + + public HugeRole(Id id, String name, String graphSpace) { + this.id = id; + this.name = name; + this.graphSpace = graphSpace; + this.description = null; + } + + public HugeRole(String name, String graphSpace) { + this(StringUtils.isNotEmpty(name) ? 
IdGenerator.of(name) : null, + name, graphSpace); + } + + public HugeRole(Id id, String graphSpace) { + this(id, id.asString(), graphSpace); + } + + public static HugeRole fromMap(Map map) { + HugeRole role = new HugeRole("", ""); + return fromMap(map, role); + } + + public static Schema schema(HugeGraphParams graph) { + return new Schema(graph); + } + + @Override + public ResourceType type() { + return ResourceType.GRANT; + } + + @Override + public String label() { + return P.ROLE; + } + + @Override + public String name() { + return this.name; + } + + public void name(String name) { + this.name = name; + } + + public String nickname() { + return this.nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + + public String graphSpace() { + return this.graphSpace; + } + + public String description() { + return this.description; + } + + public void description(String description) { + this.description = description; + } + + @Override + public String toString() { + return String.format("HugeGroup(%s)", this.id); + } + + @Override + protected boolean property(String key, Object value) { + if (super.property(key, value)) { + return true; + } + switch (key) { + case P.GRAPHSPACE: + this.graphSpace = (String) value; + break; + case P.NAME: + this.name = (String) value; + break; + case P.NICKNAME: + this.nickname = (String) value; + break; + case P.DESCRIPTION: + this.description = (String) value; + break; + default: + throw new AssertionError("Unsupported key: " + key); + } + return true; + } + + @Override + protected Object[] asArray() { + E.checkState(this.name != null, "Group name can't be null"); + + List list = new ArrayList<>(12); + + list.add(T.label); + list.add(P.ROLE); + + list.add(P.GRAPHSPACE); + list.add(this.graphSpace); + + list.add(P.NAME); + list.add(this.name); + + if (this.nickname != null) { + list.add(P.NICKNAME); + list.add(this.nickname); + } + + if (this.description != null) { + list.add(P.DESCRIPTION); + 
list.add(this.description); + } + + return super.asArray(list); + } + + @Override + public Map asMap() { + E.checkState(this.name != null, "Group name can't be null"); + + Map map = new HashMap<>(); + + map.put(Hidden.unHide(P.NAME), this.name); + map.put(Hidden.unHide(P.GRAPHSPACE), this.graphSpace); + if (this.description != null) { + map.put(Hidden.unHide(P.DESCRIPTION), this.description); + } + + if (this.nickname != null) { + map.put(Hidden.unHide(P.NICKNAME), this.nickname); + } + + return super.asMap(map); + } + + public static final class P { + + public static final String ROLE = Hidden.hide("role"); + + public static final String ID = T.id.getAccessor(); + public static final String LABEL = T.label.getAccessor(); + + public static final String NAME = "~role_name"; + public static final String NICKNAME = "~role_nickname"; + public static final String GRAPHSPACE = "~graphspace"; + public static final String DESCRIPTION = "~role_description"; + + public static String unhide(String key) { + final String prefix = Hidden.hide("role_"); + if (key.startsWith(prefix)) { + return key.substring(prefix.length()); + } + return key; + } + } + + public static final class Schema extends SchemaDefine { + + public Schema(HugeGraphParams graph) { + super(graph, P.ROLE); + } + + @Override + public void initSchemaIfNeeded() { + if (this.existVertexLabel(this.label)) { + return; + } + + String[] properties = this.initProperties(); + + // Create vertex label + VertexLabel label = this.schema().vertexLabel(this.label) + .properties(properties) + .usePrimaryKeyId() + .primaryKeys(P.NAME) + .nullableKeys(P.DESCRIPTION, P.NICKNAME) + .enableLabelIndex(true) + .build(); + this.graph.schemaTransaction().addVertexLabel(label); + } + + protected String[] initProperties() { + List props = new ArrayList<>(); + + props.add(createPropertyKey(P.NAME)); + props.add(createPropertyKey(P.DESCRIPTION)); + props.add(createPropertyKey(P.NICKNAME)); + + return super.initProperties(props); + } + } +} 
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java index 9f19f85f43..56f59c27bf 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java @@ -41,6 +41,7 @@ public class HugeTarget extends Entity { private String name; private String graph; + private String description; private String url; private List resources; @@ -91,6 +92,18 @@ public String graph() { return this.graph; } + public void graph(String graph) { + this.graph = graph; + } + + public String description() { + return this.description; + } + + public void description(String description) { + this.description = description; + } + public String url() { return this.url; } @@ -257,4 +270,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeTarget fromMap(Map map) { + HugeTarget target = new HugeTarget(null); + return fromMap(map, target); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java index 465d417675..1fc087a707 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java @@ -36,6 +36,7 @@ public class HugeUser extends Entity { private static final long serialVersionUID = -8951193710873772717L; private String name; + private String nickname; private String password; private String phone; private String email; @@ -73,6 +74,14 @@ public String name() { return this.name; } + public String nickname() { + return nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + public String 
password() { return this.password; } @@ -280,4 +289,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeUser fromMap(Map map) { + HugeUser user = new HugeUser(""); + return fromMap(map, user); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java index 98072d231b..faec762a03 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java @@ -26,6 +26,7 @@ import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.auth.HugeTarget.P; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.SchemaManager; @@ -243,6 +244,14 @@ public abstract static class Entity extends AuthElement private static final long serialVersionUID = 4113319546914811762L; + public static T fromMap(Map map, T entity) { + for (Map.Entry item : map.entrySet()) { + entity.property(Hidden.hide(item.getKey()), item.getValue()); + } + entity.id(IdGenerator.of(entity.name())); + return entity; + } + public static T fromVertex(Vertex vertex, T entity) { E.checkArgument(vertex.label().equals(entity.label()), "Illegal vertex label '%s' for entity '%s'", @@ -279,6 +288,19 @@ public abstract static class Relationship extends AuthElement { public abstract Id target(); + public void setId() { + this.id(IdGenerator.of(this.source().asString() + "->" + + this.target().asString())); + } + + public static T fromMap(Map map, T entity) { + for (Map.Entry item : map.entrySet()) { + entity.property(Hidden.hide(item.getKey()), item.getValue()); + } + entity.setId(); + return entity; + } + public static T 
fromEdge(Edge edge, T relationship) { E.checkArgument(edge.label().equals(relationship.label()), diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java index c7a43e228e..30bffab814 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CacheManager.java @@ -150,4 +150,8 @@ public Cache levelCache(HugeGraph graph, String name, "Invalid cache implement: %s", cache.getClass()); return cache; } + + public void clearCache() { + this.caches.clear(); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java index ba5c5821b9..4f9e5f5937 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransaction.java @@ -278,7 +278,7 @@ protected T getSchema(HugeType type, } @Override - protected void removeSchema(SchemaElement schema) { + public void removeSchema(SchemaElement schema) { super.removeSchema(schema); this.invalidateCache(schema.type(), schema.id()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java new file mode 100644 index 0000000000..e6a5e78533 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java @@ -0,0 +1,488 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.cache; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; + +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.store.ram.IntObjectMap; +import org.apache.hugegraph.backend.tx.SchemaTransactionV2; +import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.event.EventHub; +import org.apache.hugegraph.event.EventListener; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.perf.PerfUtil; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Events; + +import com.google.common.collect.ImmutableSet; + +public class CachedSchemaTransactionV2 extends SchemaTransactionV2 { + private final Cache idCache; + private final Cache nameCache; + + private final SchemaCaches arrayCaches; + + private EventListener storeEventListener; + private 
EventListener cacheEventListener; + + public CachedSchemaTransactionV2(MetaDriver metaDriver, + String cluster, + HugeGraphParams graphParams) { + super(metaDriver, cluster, graphParams); + + final long capacity = graphParams.configuration() + .get(CoreOptions.SCHEMA_CACHE_CAPACITY); + this.idCache = this.cache("schema-id", capacity); + this.nameCache = this.cache("schema-name", capacity); + + SchemaCaches attachment = this.idCache.attachment(); + if (attachment == null) { + int acSize = (int) (capacity >> 3); + attachment = this.idCache.attachment(new SchemaCaches<>(acSize)); + } + this.arrayCaches = attachment; + this.listenChanges(); + } + + private static Id generateId(HugeType type, Id id) { + // NOTE: it's slower performance to use: + // String.format("%x-%s", type.code(), name) + return IdGenerator.of(type.string() + "-" + id.asString()); + } + + private static Id generateId(HugeType type, String name) { + return IdGenerator.of(type.string() + "-" + name); + } + + public void close() { + this.clearCache(false); + this.unlistenChanges(); + } + + private Cache cache(String prefix, long capacity) { + // TODO: uncomment later - graph space + final String name = prefix + "-" + this.graphName(); + // NOTE: must disable schema cache-expire due to getAllSchema() + return CacheManager.instance().cache(name, capacity); + } + + private void listenChanges() { + // Listen store event: "store.init", "store.clear", ... 
+ Set storeEvents = ImmutableSet.of(Events.STORE_INIT, + Events.STORE_CLEAR, + Events.STORE_TRUNCATE); + this.storeEventListener = event -> { + if (storeEvents.contains(event.name())) { + LOG.debug("Graph {} clear schema cache on event '{}'", + this.graph(), event.name()); + this.clearCache(true); + return true; + } + return false; + }; + this.graphParams().loadGraphStore().provider().listen(this.storeEventListener); + + // Listen cache event: "cache"(invalid cache item) + this.cacheEventListener = event -> { + LOG.debug("Graph {} received schema cache event: {}", + this.graph(), event); + Object[] args = event.args(); + E.checkArgument(args.length > 0 && args[0] instanceof String, + "Expect event action argument"); + if (Cache.ACTION_INVALID.equals(args[0])) { + event.checkArgs(String.class, HugeType.class, Id.class); + HugeType type = (HugeType) args[1]; + Id id = (Id) args[2]; + this.arrayCaches.remove(type, id); + + id = generateId(type, id); + Object value = this.idCache.get(id); + if (value != null) { + // Invalidate id cache + this.idCache.invalidate(id); + + // Invalidate name cache + SchemaElement schema = (SchemaElement) value; + Id prefixedName = generateId(schema.type(), + schema.name()); + this.nameCache.invalidate(prefixedName); + } + this.resetCachedAll(type); + return true; + } else if (Cache.ACTION_CLEAR.equals(args[0])) { + event.checkArgs(String.class, HugeType.class); + this.clearCache(false); + return true; + } + return false; + }; + EventHub schemaEventHub = this.graphParams().schemaEventHub(); + if (!schemaEventHub.containsListener(Events.CACHE)) { + schemaEventHub.listen(Events.CACHE, this.cacheEventListener); + } + } + + public void clearCache(boolean notify) { + this.idCache.clear(); + this.nameCache.clear(); + this.arrayCaches.clear(); + } + + private void resetCachedAllIfReachedCapacity() { + if (this.idCache.size() >= this.idCache.capacity()) { + LOG.warn("Schema cache reached capacity({}): {}", + this.idCache.capacity(), 
this.idCache.size()); + this.cachedTypes().clear(); + } + } + + private void unlistenChanges() { + // Unlisten store event + this.graphParams().loadGraphStore().provider() + .unlisten(this.storeEventListener); + + // Unlisten cache event + EventHub schemaEventHub = this.graphParams().schemaEventHub(); + schemaEventHub.unlisten(Events.CACHE, this.cacheEventListener); + } + + private CachedTypes cachedTypes() { + return this.arrayCaches.cachedTypes(); + } + + private void resetCachedAll(HugeType type) { + // Set the cache all flag of the schema type to false + this.cachedTypes().put(type, false); + } + + private void invalidateCache(HugeType type, Id id) { + // remove from id cache and name cache + Id prefixedId = generateId(type, id); + Object value = this.idCache.get(prefixedId); + if (value != null) { + this.idCache.invalidate(prefixedId); + + SchemaElement schema = (SchemaElement) value; + Id prefixedName = generateId(schema.type(), schema.name()); + this.nameCache.invalidate(prefixedName); + } + + // remove from optimized array cache + this.arrayCaches.remove(type, id); + } + + @Override + protected void updateSchema(SchemaElement schema, + Consumer updateCallback) { + super.updateSchema(schema, updateCallback); + + this.updateCache(schema); + } + + @Override + protected void addSchema(SchemaElement schema) { + super.addSchema(schema); + + this.updateCache(schema); + + if (!this.graph().option(CoreOptions.TASK_SYNC_DELETION)) { + MetaManager.instance() + // TODO: uncomment later - graph space + //.notifySchemaCacheClear(this.graph().graphSpace(), + // this.graph().name()); + .notifySchemaCacheClear("", + this.graph().name()); + } + } + + private void updateCache(SchemaElement schema) { + this.resetCachedAllIfReachedCapacity(); + + // update id cache + Id prefixedId = generateId(schema.type(), schema.id()); + this.idCache.update(prefixedId, schema); + + // update name cache + Id prefixedName = generateId(schema.type(), schema.name()); + 
this.nameCache.update(prefixedName, schema); + + // update optimized array cache + this.arrayCaches.updateIfNeeded(schema); + } + + @Override + public void removeSchema(SchemaElement schema) { + super.removeSchema(schema); + + this.invalidateCache(schema.type(), schema.id()); + + if (!this.graph().option(CoreOptions.TASK_SYNC_DELETION)) { + MetaManager.instance() + // TODO: uncomment later - graph space + //.notifySchemaCacheClear(this.graph().graphSpace(), + // this.graph().name()); + .notifySchemaCacheClear("", + this.graph().name()); + } + } + + @Override + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, Id id) { + // try get from optimized array cache + if (id.number() && id.asLong() > 0L) { + SchemaElement value = this.arrayCaches.get(type, id); + if (value != null) { + return (T) value; + } + } + + Id prefixedId = generateId(type, id); + Object value = this.idCache.get(prefixedId); + if (value == null) { + value = super.getSchema(type, id); + if (value != null) { + this.resetCachedAllIfReachedCapacity(); + + this.idCache.update(prefixedId, value); + + SchemaElement schema = (SchemaElement) value; + Id prefixedName = generateId(schema.type(), schema.name()); + this.nameCache.update(prefixedName, schema); + } + } + + // update optimized array cache + this.arrayCaches.updateIfNeeded((SchemaElement) value); + + return (T) value; + } + + @Override + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, + String name) { + Id prefixedName = generateId(type, name); + Object value = this.nameCache.get(prefixedName); + if (value == null) { + value = super.getSchema(type, name); + if (value != null) { + // Note: reload all schema if the cache is inconsistent with storage layer + this.clearCache(false); + this.loadAllSchema(); + } + } + return (T) value; + } + + @Override + protected List getAllSchema(HugeType type) { + Boolean cachedAll = this.cachedTypes().getOrDefault(type, false); + List results; + if (cachedAll) { + results = 
new ArrayList<>(); + // Get from cache + this.idCache.traverse(value -> { + @SuppressWarnings("unchecked") + T schema = (T) value; + if (schema.type() == type) { + results.add(schema); + } + }); + return results; + } else { + results = super.getAllSchema(type); + long free = this.idCache.capacity() - this.idCache.size(); + if (results.size() <= free) { + // Update cache + for (T schema : results) { + Id prefixedId = generateId(schema.type(), schema.id()); + this.idCache.update(prefixedId, schema); + + Id prefixedName = generateId(schema.type(), schema.name()); + this.nameCache.update(prefixedName, schema); + } + this.cachedTypes().putIfAbsent(type, true); + } + return results; + } + } + + private void loadAllSchema() { + getAllSchema(HugeType.PROPERTY_KEY); + getAllSchema(HugeType.VERTEX_LABEL); + getAllSchema(HugeType.EDGE_LABEL); + getAllSchema(HugeType.INDEX_LABEL); + } + + @Override + public void clear() { + // Clear schema info firstly + super.clear(); + this.clearCache(false); + } + + private static final class SchemaCaches { + + private final int size; + + private final IntObjectMap pks; + private final IntObjectMap vls; + private final IntObjectMap els; + private final IntObjectMap ils; + + private final CachedTypes cachedTypes; + + public SchemaCaches(int size) { + // TODO: improve size of each type for optimized array cache + this.size = size; + + this.pks = new IntObjectMap<>(size); + this.vls = new IntObjectMap<>(size); + this.els = new IntObjectMap<>(size); + this.ils = new IntObjectMap<>(size); + + this.cachedTypes = new CachedTypes(); + } + + public void updateIfNeeded(V schema) { + if (schema == null) { + return; + } + Id id = schema.id(); + if (id.number() && id.asLong() > 0L) { + this.set(schema.type(), id, schema); + } + } + + @PerfUtil.Watched + public V get(HugeType type, Id id) { + assert id.number(); + long longId = id.asLong(); + if (longId <= 0L) { + assert false : id; + return null; + } + int key = (int) longId; + if (key >= this.size) { + 
return null; + } + switch (type) { + case PROPERTY_KEY: + return this.pks.get(key); + case VERTEX_LABEL: + return this.vls.get(key); + case EDGE_LABEL: + return this.els.get(key); + case INDEX_LABEL: + return this.ils.get(key); + default: + return null; + } + } + + public void set(HugeType type, Id id, V value) { + assert id.number(); + long longId = id.asLong(); + if (longId <= 0L) { + assert false : id; + return; + } + int key = (int) longId; + if (key >= this.size) { + return; + } + switch (type) { + case PROPERTY_KEY: + this.pks.set(key, value); + break; + case VERTEX_LABEL: + this.vls.set(key, value); + break; + case EDGE_LABEL: + this.els.set(key, value); + break; + case INDEX_LABEL: + this.ils.set(key, value); + break; + default: + // pass + break; + } + } + + public void remove(HugeType type, Id id) { + assert id.number(); + long longId = id.asLong(); + if (longId <= 0L) { + return; + } + int key = (int) longId; + V value = null; + if (key >= this.size) { + return; + } + switch (type) { + case PROPERTY_KEY: + this.pks.set(key, value); + break; + case VERTEX_LABEL: + this.vls.set(key, value); + break; + case EDGE_LABEL: + this.els.set(key, value); + break; + case INDEX_LABEL: + this.ils.set(key, value); + break; + default: + // pass + break; + } + } + + public void clear() { + this.pks.clear(); + this.vls.clear(); + this.els.clear(); + this.ils.clear(); + + this.cachedTypes.clear(); + } + + public CachedTypes cachedTypes() { + return this.cachedTypes; + } + } + + private static class CachedTypes + extends ConcurrentHashMap { + + private static final long serialVersionUID = -2215549791679355996L; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java index 9082f9a624..37a7e9a9ca 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java @@ -425,6 +425,7 @@ public BackendEntry writeVertex(HugeVertex vertex) { @Override public BackendEntry writeOlapVertex(HugeVertex vertex) { + // TODO: adapt to hstore (merge olap table) BinaryBackendEntry entry = newBackendEntry(HugeType.OLAP, vertex.id()); BytesBuffer buffer = BytesBuffer.allocate(8 + 16); @@ -529,7 +530,6 @@ public HugeEdge readEdge(HugeGraph graph, BackendEntry bytesEntry) { @Override public CIter readEdges(HugeGraph graph, BackendEntry bytesEntry) { - BinaryBackendEntry entry = this.convertEntry(bytesEntry); // Parse id @@ -686,8 +686,10 @@ private Query writeQueryEdgeRangeCondition(ConditionQuery cq) { if (cq.paging() && !cq.page().isEmpty()) { includeStart = true; byte[] position = PageState.fromString(cq.page()).position(); - E.checkArgument(Bytes.compare(position, startId.asBytes()) >= 0, - "Invalid page out of lower bound"); + // FIXME: Due to the inconsistency in the definition of `position` of RocksDB + // scan iterator and Hstore, temporarily remove the following check. + // E.checkArgument(Bytes.compare(position, startId.asBytes()) >= 0, + // "Invalid page out of lower bound"); startId = new BinaryId(position, null); } if (range.keyMax() == null) { @@ -797,8 +799,10 @@ private Query writeRangeIndexQuery(ConditionQuery query) { if (start == null) { return new IdPrefixQuery(query, id); } - E.checkArgument(Bytes.compare(start.asBytes(), id.asBytes()) >= 0, - "Invalid page out of lower bound"); + // FIXME: Due to the inconsistency in the definition of `position` of RocksDB + // scan iterator and Hstore, temporarily remove the following check. 
+ // E.checkArgument(Bytes.compare(start.asBytes(), id.asBytes()) >= 0, + // "Invalid page out of lower bound"); return new IdPrefixQuery(query, start, id); } @@ -827,8 +831,10 @@ private Query writeRangeIndexQuery(ConditionQuery query) { if (start == null) { start = min; } else { - E.checkArgument(Bytes.compare(start.asBytes(), min.asBytes()) >= 0, - "Invalid page out of lower bound"); + // FIXME: Due to the inconsistency in the definition of `position` of RocksDB + // scan iterator and Hstore, temporarily remove the following check. + // E.checkArgument(Bytes.compare(start.asBytes(), min.asBytes()) >= 0, + // "Invalid page out of lower bound"); } if (keyMax == null) { @@ -923,8 +929,10 @@ private static Query prefixQuery(ConditionQuery query, Id prefix) { * the page to id and use it as the starting row for this query */ byte[] position = PageState.fromString(query.page()).position(); - E.checkArgument(Bytes.compare(position, prefix.asBytes()) >= 0, - "Invalid page out of lower bound"); + // FIXME: Due to the inconsistency in the definition of `position` of RocksDB + // scan iterator and Hstore, temporarily remove the following check. 
+ // E.checkArgument(Bytes.compare(position, prefix.asBytes()) >= 0, + // "Invalid page out of lower bound"); BinaryId start = new BinaryId(position, null); newQuery = new IdPrefixQuery(query, start, prefix); } else { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java index b0ae892f70..f293dd2873 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java @@ -115,10 +115,6 @@ public static BytesBuffer wrap(byte[] array, int offset, int length) { return new BytesBuffer(ByteBuffer.wrap(array, offset, length)); } - public static byte getType(int value) { - return (byte) (value & 0x3f); - } - public ByteBuffer asByteBuffer() { return this.buffer; } @@ -601,6 +597,10 @@ public void writeProperty(DataType dataType, Object value) { } } + public static byte getType(int value) { + return (byte) (value & 0x3f); + } + public Object readProperty(DataType dataType) { switch (dataType) { case BOOLEAN: @@ -756,11 +756,11 @@ public BytesBuffer writeIndexId(Id id, HugeType type, boolean withEnding) { public BinaryId readIndexId(HugeType type) { byte[] id; if (type.isRange4Index()) { - // IndexLabel 4 bytes + fieldValue 4 bytes - id = this.read(8); + // HugeTypeCode 1 byte + IndexLabel 4 bytes + fieldValue 4 bytes + id = this.read(9); } else if (type.isRange8Index()) { - // IndexLabel 4 bytes + fieldValue 8 bytes - id = this.read(12); + // HugeTypeCode 1 byte + IndexLabel 4 bytes + fieldValue 8 bytes + id = this.read(13); } else { assert type.isStringIndex(); id = this.readBytesWithEnding(); @@ -796,7 +796,6 @@ public BinaryId parseId(HugeType type, boolean enablePartition) { /** * 解析 olap id - * * @param type * @param isOlap * @return diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java index 02c613aa6d..5a39aeaeaf 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java @@ -354,6 +354,7 @@ public HugeEdge readEdge(HugeGraph graph, BackendEntry backendEntry) { throw new NotImplementedException("Unsupported readEdge()"); } + @Override public CIter readEdges(HugeGraph graph, BackendEntry bytesEntry) { E.checkNotNull(graph, "serializer graph"); // TODO: implement diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java index ed3f36f94f..f489e9572a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java @@ -40,9 +40,14 @@ public boolean exists() { } public boolean checkVersion() { + BackendStore store; + if (this.storeProvider.isHstore()) { + store = this.storeProvider.loadGraphStore(this.config); + } else { + store = this.storeProvider.loadSystemStore(this.config); + } String driverVersion = this.storeProvider.driverVersion(); - String storedVersion = this.storeProvider.loadSystemStore(this.config) - .storedVersion(); + String storedVersion = store.storedVersion(); if (!driverVersion.equals(storedVersion)) { LOG.error("The backend driver version '{}' is inconsistent with " + "the data version '{}' of backend store for graph '{}'", diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java index a819451034..44e58c2289 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java @@ -74,4 +74,8 @@ public interface BackendStoreProvider { void onCloneConfig(HugeConfig config, String newGraph); void onDeleteConfig(HugeConfig config); + + default boolean isHstore() { + return "hstore".equals(type()); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java index 825bbcff7d..c482749744 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java @@ -93,7 +93,7 @@ protected Collection tables() { @Override protected final InMemoryDBTable table(HugeType type) { assert type != null; - InMemoryDBTable table = this.tables.get(type); + InMemoryDBTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table type: %s", type); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java index e7f6114400..3905ebb0cc 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java @@ -192,7 +192,7 @@ private void updateVertexOlapIndex(HugeVertex vertex, boolean removed) { * @param removed remove or add index */ protected void updateIndex(Id ilId, HugeElement element, boolean removed) { - SchemaTransaction schema = this.params().schemaTransaction(); + ISchemaTransaction schema = this.params().schemaTransaction(); IndexLabel indexLabel = schema.getIndexLabel(ilId); E.checkArgument(indexLabel != null, "Not exist index label with id '%s'", ilId); @@ -732,7 +732,7 @@ private PageIds doIndexQueryOnce(IndexLabel indexLabel, @Watched(prefix = "index") private Set collectMatchedIndexes(ConditionQuery query) { - SchemaTransaction schema = this.params().schemaTransaction(); + ISchemaTransaction schema = this.params().schemaTransaction(); Id label = query.condition(HugeKeys.LABEL); List schemaLabels; @@ -783,7 +783,7 @@ private Set collectMatchedIndexes(ConditionQuery query) { @Watched(prefix = "index") private MatchedIndex collectMatchedIndex(SchemaLabel schemaLabel, ConditionQuery query) { - SchemaTransaction schema = this.params().schemaTransaction(); + ISchemaTransaction schema = this.params().schemaTransaction(); Set ils = InsertionOrderUtil.newSet(); for (Id il : schemaLabel.indexLabels()) { IndexLabel indexLabel = schema.getIndexLabel(il); @@ -1749,7 +1749,9 @@ protected long removeIndexLeft(ConditionQuery query, HugeElement element) { if (element.type() != HugeType.VERTEX && element.type() != HugeType.EDGE_OUT && - element.type() != HugeType.EDGE_IN) { + element.type() != HugeType.EDGE_IN && + element.type() != HugeType.TASK && + element.type() != HugeType.SERVER) { throw new HugeException("Only accept element of type VERTEX " + "and EDGE to remove left index, " + "but got: '%s'", element.type()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java index f43557bfcd..10aa73f719 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java @@ -735,16 +735,50 @@ public Vertex queryVertex(Object vertexId) { return vertex; } + public Iterator queryTaskInfos(Query query) { + return this.queryVertices(query); + } + + public Iterator queryTaskInfos(Object... vertexIds) { + if (this.graph().backendStoreFeatures().supportsTaskAndServerVertex()) { + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.TASK); + } + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.VERTEX); + } + + public Iterator queryServerInfos(Query query) { + return this.queryVertices(query); + } + + public Iterator queryServerInfos(Object... vertexIds) { + if (this.graph().backendStoreFeatures().supportsTaskAndServerVertex()) { + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.SERVER); + } + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.VERTEX); + } + + protected Iterator queryVerticesByIds(Object[] vertexIds, + boolean adjacentVertex, + boolean checkMustExist) { + return this.queryVerticesByIds(vertexIds, adjacentVertex, checkMustExist, + HugeType.VERTEX); + } + protected Iterator queryVerticesByIds(Object[] vertexIds, boolean adjacentVertex, - boolean checkMustExist) { + boolean checkMustExist, + HugeType type) { Query.checkForceCapacity(vertexIds.length); // NOTE: allowed duplicated vertices if query by duplicated ids List ids = InsertionOrderUtil.newList(); Map vertices = new HashMap<>(vertexIds.length); - IdQuery query = new IdQuery(HugeType.VERTEX); + IdQuery query = new IdQuery(type); for (Object vertexId : vertexIds) { HugeVertex vertex; Id id = HugeVertex.getIdValue(vertexId); diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java new file mode 100644 index 0000000000..ce740d4350 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.tx; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.schema.SchemaLabel; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.GraphMode; +import org.apache.hugegraph.type.define.SchemaStatus; + +public interface ISchemaTransaction { + List getPropertyKeys(); + + Id removePropertyKey(Id pkey); + + PropertyKey getPropertyKey(Id id); + + PropertyKey getPropertyKey(String name); + + Id clearOlapPk(PropertyKey propertyKey); + + void addVertexLabel(VertexLabel label); + + void updateVertexLabel(VertexLabel label); + + Id removeVertexLabel(Id label); + + List getVertexLabels(); + + VertexLabel getVertexLabel(Id id); + + VertexLabel getVertexLabel(String name); + + List getEdgeLabels(); + + Id addPropertyKey(PropertyKey pkey); + + void updatePropertyKey(PropertyKey pkey); + + void updateEdgeLabel(EdgeLabel label); + + void addEdgeLabel(EdgeLabel label); + + Id removeEdgeLabel(Id id); + + EdgeLabel getEdgeLabel(Id id); + + EdgeLabel getEdgeLabel(String name); + + void addIndexLabel(SchemaLabel schemaLabel, IndexLabel indexLabel); + + void updateIndexLabel(IndexLabel label); + + Id removeIndexLabel(Id id); + + Id rebuildIndex(SchemaElement schema); + + Id rebuildIndex(SchemaElement schema, Set dependencies); + + List getIndexLabels(); + + IndexLabel getIndexLabel(Id id); + + IndexLabel getIndexLabel(String name); + + void close(); + + Id getNextId(HugeType type); + + Id validOrGenerateId(HugeType type, Id id, String name); + + void checkSchemaName(String name); + + String graphName(); + + void updateSchemaStatus(SchemaElement element, SchemaStatus status); 
+ + GraphMode graphMode(); + + boolean existsSchemaId(HugeType type, Id id); + + void removeIndexLabelFromBaseLabel(IndexLabel indexLabel); + + void createIndexLabelForOlapPk(PropertyKey propertyKey); + + void removeSchema(SchemaElement schema); +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java new file mode 100644 index 0000000000..87a10ce2c8 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.tx; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; + +import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.pd.client.PDClient; +import org.apache.hugegraph.pd.grpc.Pdpb; +import org.apache.hugegraph.store.term.HgPair; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.util.E; + +public class IdCounter { + + private static final int TIMES = 10000; + private static final int DELTA = 10000; + private static final String DELIMITER = "/"; + private static final Map> ids = + new ConcurrentHashMap<>(); + private final PDClient pdClient; + private final String graphName; + + public IdCounter(PDClient pdClient, String graphName) { + this.graphName = graphName; + this.pdClient = pdClient; + } + + public Id nextId(HugeType type) { + long counter = this.getCounter(type); + E.checkState(counter != 0L, "Please check whether '%s' is OK", + this.pdClient.toString()); + return IdGenerator.of(counter); + } + + public void setCounterLowest(HugeType type, long lowest) { + long current = this.getCounter(type); + if (current >= lowest) { + return; + } + long increment = lowest - current; + this.increaseCounter(type, increment); + } + + public long getCounter(HugeType type) { + return this.getCounterFromPd(type); + } + + public synchronized void increaseCounter(HugeType type, long lowest) { + String key = toKey(this.graphName, type); + getCounterFromPd(type); + HgPair idPair = ids.get(key); + AtomicLong currentId = idPair.getKey(); + AtomicLong maxId = idPair.getValue(); + if (currentId.longValue() >= lowest) { + return; + } + if (maxId.longValue() >= lowest) { + currentId.set(lowest); + return; + } + synchronized (ids) { + try { + this.pdClient.getIdByKey(key, (int) (lowest - maxId.longValue())); + ids.remove(key); + } 
catch (Exception e) { + throw new BackendException(e); + } + } + } + + protected String toKey(String graphName, HugeType type) { + return new StringBuilder().append(graphName) + .append(DELIMITER) + .append(type.code()).toString(); + } + + public long getCounterFromPd(HugeType type) { + AtomicLong currentId; + AtomicLong maxId; + HgPair idPair; + String key = toKey(this.graphName, type); + if ((idPair = ids.get(key)) == null) { + synchronized (ids) { + if ((idPair = ids.get(key)) == null) { + try { + currentId = new AtomicLong(0); + maxId = new AtomicLong(0); + idPair = new HgPair<>(currentId, maxId); + ids.put(key, idPair); + } catch (Exception e) { + throw new BackendException(String.format( + "Failed to get the ID from pd,%s", e)); + } + } + } + } + currentId = idPair.getKey(); + maxId = idPair.getValue(); + for (int i = 0; i < TIMES; i++) { + synchronized (currentId) { + if ((currentId.incrementAndGet()) <= maxId.longValue()) { + return currentId.longValue(); + } + if (currentId.longValue() > maxId.longValue()) { + try { + Pdpb.GetIdResponse idByKey = pdClient.getIdByKey(key, DELTA); + idPair.getValue().getAndSet(idByKey.getId() + + idByKey.getDelta()); + idPair.getKey().getAndSet(idByKey.getId()); + } catch (Exception e) { + throw new BackendException(String.format( + "Failed to get the ID from pd,%s", e)); + } + } + } + } + E.checkArgument(false, + "Having made too many attempts to get the" + + " ID for type '%s'", type.name()); + return 0L; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java index 1a42cf705a..1d7f595f0d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransaction.java @@ -68,7 +68,7 @@ import 
com.google.common.collect.ImmutableSet; -public class SchemaTransaction extends IndexableTransaction { +public class SchemaTransaction extends IndexableTransaction implements ISchemaTransaction { private final SchemaIndexTransaction indexTx; private final SystemSchemaStore systemSchemaStore; @@ -524,7 +524,8 @@ protected List getAllSchema(HugeType type) { return results; } - protected void removeSchema(SchemaElement schema) { + @Override + public void removeSchema(SchemaElement schema) { LOG.debug("SchemaTransaction remove {} by id '{}'", schema.type(), schema.id()); // System schema just remove from SystemSchemaStore in memory diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java new file mode 100644 index 0000000000..c199cfa129 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java @@ -0,0 +1,757 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.tx; + +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; + +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.exception.NotAllowException; +import org.apache.hugegraph.job.JobBuilder; +import org.apache.hugegraph.job.schema.EdgeLabelRemoveJob; +import org.apache.hugegraph.job.schema.IndexLabelRebuildJob; +import org.apache.hugegraph.job.schema.IndexLabelRemoveJob; +import org.apache.hugegraph.job.schema.OlapPropertyKeyClearJob; +import org.apache.hugegraph.job.schema.OlapPropertyKeyCreateJob; +import org.apache.hugegraph.job.schema.OlapPropertyKeyRemoveJob; +import org.apache.hugegraph.job.schema.SchemaJob; +import org.apache.hugegraph.job.schema.VertexLabelRemoveJob; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.meta.PdMetaDriver; +import org.apache.hugegraph.meta.managers.SchemaMetaManager; +import org.apache.hugegraph.perf.PerfUtil.Watched; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.schema.SchemaLabel; +import org.apache.hugegraph.schema.Userdata; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.task.HugeTask; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.GraphMode; +import org.apache.hugegraph.type.define.SchemaStatus; +import org.apache.hugegraph.type.define.WriteType; +import org.apache.hugegraph.util.DateUtil; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.LockUtil; +import org.apache.hugegraph.util.Log; +import 
org.apache.tinkerpop.gremlin.structure.Graph; +import org.slf4j.Logger; + +import com.google.common.collect.ImmutableSet; + +public class SchemaTransactionV2 implements ISchemaTransaction { + + protected static final Logger LOG = Log.logger(SchemaTransaction.class); + + private final String graphSpace; + private final String graph; + private final HugeGraphParams graphParams; + private final IdCounter idCounter; + private final SchemaMetaManager schemaMetaManager; + + public SchemaTransactionV2(MetaDriver metaDriver, + String cluster, + HugeGraphParams graphParams) { + E.checkNotNull(graphParams, "graphParams"); + this.graphParams = graphParams; + // TODO: uncomment later - graph space + //this.graphSpace = graphParams.graph().graphSpace(); + this.graphSpace = ""; + this.graph = graphParams.name(); + this.schemaMetaManager = + new SchemaMetaManager(metaDriver, cluster, this.graph()); + this.idCounter = new IdCounter(((PdMetaDriver) metaDriver).pdClient(), + idKeyName(this.graphSpace, this.graph)); + } + + private static void setCreateTimeIfNeeded(SchemaElement schema) { + if (!schema.userdata().containsKey(Userdata.CREATE_TIME)) { + schema.userdata(Userdata.CREATE_TIME, DateUtil.now()); + } + } + + /** + * 异步任务系列 + */ + private static Id asyncRun(HugeGraph graph, SchemaElement schema, + SchemaJob job) { + return asyncRun(graph, schema, job, ImmutableSet.of()); + } + + @Watched(prefix = "schema") + private static Id asyncRun(HugeGraph graph, SchemaElement schema, + SchemaJob job, Set dependencies) { + E.checkArgument(schema != null, "Schema can't be null"); + String name = SchemaJob.formatTaskName(schema.type(), + schema.id(), + schema.name()); + + JobBuilder builder = JobBuilder.of(graph).name(name) + .job(job) + .dependencies(dependencies); + HugeTask task = builder.schedule(); + // If TASK_SYNC_DELETION is true, wait async thread done before + // continue. This is used when running tests. 
+ if (graph.option(CoreOptions.TASK_SYNC_DELETION)) { + task.syncWait(); + } + return task.id(); + } + + public String idKeyName(String graphSpace, String graph) { + // {graphSpace}/{graph}/m "m" means "schema" + return String.join("/", graphSpace, graph, "m"); + } + + @Watched(prefix = "schema") + public List getPropertyKeys(boolean cache) { + return this.getAllSchema(HugeType.PROPERTY_KEY); + } + + @Watched(prefix = "schema") + public List getPropertyKeys() { + return this.getAllSchema(HugeType.PROPERTY_KEY); + } + + @Watched(prefix = "schema") + public List getVertexLabels() { + return this.getAllSchema(HugeType.VERTEX_LABEL); + } + + @Watched(prefix = "schema") + public List getEdgeLabels() { + return this.getAllSchema(HugeType.EDGE_LABEL); + } + + @Watched(prefix = "schema") + public List getIndexLabels() { + return this.getAllSchema(HugeType.INDEX_LABEL); + } + + @Watched(prefix = "schema") + public Id addPropertyKey(PropertyKey propertyKey) { + this.addSchema(propertyKey); + if (!propertyKey.olap()) { + return IdGenerator.ZERO; + } + return this.createOlapPk(propertyKey); + } + + @Watched(prefix = "schema") + public void updatePropertyKey(PropertyKey propertyKey) { + this.updateSchema(propertyKey, null); + } + + public void updatePropertyKey(PropertyKey old, PropertyKey update) { + this.removePropertyKey(old.id()); + this.addPropertyKey(update); + } + + @Watched(prefix = "schema") + public PropertyKey getPropertyKey(Id id) { + E.checkArgumentNotNull(id, "Property key id can't be null"); + return this.getSchema(HugeType.PROPERTY_KEY, id); + } + + @Watched(prefix = "schema") + public PropertyKey getPropertyKey(String name) { + E.checkArgumentNotNull(name, "Property key name can't be null"); + E.checkArgument(!name.isEmpty(), "Property key name can't be empty"); + return this.getSchema(HugeType.PROPERTY_KEY, name); + } + + @Watched(prefix = "schema") + public Id removePropertyKey(Id id) { + LOG.debug("SchemaTransaction remove property key '{}'", id); + 
PropertyKey propertyKey = this.getPropertyKey(id); + // If the property key does not exist, return directly + if (propertyKey == null) { + return null; + } + + List vertexLabels = this.getVertexLabels(); + for (VertexLabel vertexLabel : vertexLabels) { + if (vertexLabel.properties().contains(id)) { + throw new NotAllowException( + "Not allowed to remove property key: '%s' " + + "because the vertex label '%s' is still using it.", + propertyKey, vertexLabel.name()); + } + } + + List edgeLabels = this.getEdgeLabels(); + for (EdgeLabel edgeLabel : edgeLabels) { + if (edgeLabel.properties().contains(id)) { + throw new NotAllowException( + "Not allowed to remove property key: '%s' " + + "because the edge label '%s' is still using it.", + propertyKey, edgeLabel.name()); + } + } + if (propertyKey.oltp()) { + this.removeSchema(propertyKey); + return IdGenerator.ZERO; + } else { + return this.removeOlapPk(propertyKey); + } + } + + @Watched(prefix = "schema") + public void addVertexLabel(VertexLabel vertexLabel) { + this.addSchema(vertexLabel); + } + + @Watched(prefix = "schema") + public void updateVertexLabel(VertexLabel vertexLabel) { + this.updateSchema(vertexLabel, null); + } + + @Watched(prefix = "schema") + public VertexLabel getVertexLabel(Id id) { + E.checkArgumentNotNull(id, "Vertex label id can't be null"); + if (SchemaElement.OLAP_ID.equals(id)) { + return VertexLabel.OLAP_VL; + } + return this.getSchema(HugeType.VERTEX_LABEL, id); + } + + @Watched(prefix = "schema") + public VertexLabel getVertexLabel(String name) { + E.checkArgumentNotNull(name, "Vertex label name can't be null"); + E.checkArgument(!name.isEmpty(), "Vertex label name can't be empty"); + if (SchemaElement.OLAP.equals(name)) { + return VertexLabel.OLAP_VL; + } + return this.getSchema(HugeType.VERTEX_LABEL, name); + } + + @Watched(prefix = "schema") + public Id removeVertexLabel(Id id) { + LOG.debug("SchemaTransaction remove vertex label '{}'", id); + SchemaJob job = new VertexLabelRemoveJob(); + 
VertexLabel schema = this.getVertexLabel(id); + return asyncRun(this.graph(), schema, job); + } + + @Watched(prefix = "schema") + public void addEdgeLabel(EdgeLabel edgeLabel) { + this.addSchema(edgeLabel); + } + + @Watched(prefix = "schema") + public void updateEdgeLabel(EdgeLabel edgeLabel) { + this.updateSchema(edgeLabel, null); + } + + @Watched(prefix = "schema") + public EdgeLabel getEdgeLabel(Id id) { + E.checkArgumentNotNull(id, "Edge label id can't be null"); + return this.getSchema(HugeType.EDGE_LABEL, id); + } + + @Watched(prefix = "schema") + public EdgeLabel getEdgeLabel(String name) { + E.checkArgumentNotNull(name, "Edge label name can't be null"); + E.checkArgument(!name.isEmpty(), "Edge label name can't be empty"); + return this.getSchema(HugeType.EDGE_LABEL, name); + } + + @Watched(prefix = "schema") + public Id removeEdgeLabel(Id id) { + /* + * Call an asynchronous task and call back the corresponding + * removeSchema() method after the task ends to complete the delete + * schema operation + */ + LOG.debug("SchemaTransaction remove edge label '{}'", id); + EdgeLabel schema = this.getEdgeLabel(id); + // TODO: uncomment later - sub edge labels + //if (schema.edgeLabelType().parent()) { + // List edgeLabels = this.getEdgeLabels(); + // for (EdgeLabel edgeLabel : edgeLabels) { + // if (edgeLabel.edgeLabelType().sub() && + // edgeLabel.fatherId() == id) { + // throw new NotAllowException( + // "Not allowed to remove a parent edge label: '%s' " + + // "because the sub edge label '%s' is still existing", + // schema.name(), edgeLabel.name()); + // } + // } + //} + SchemaJob job = new EdgeLabelRemoveJob(); + return asyncRun(this.graph(), schema, job); + } + + @Watched(prefix = "schema") + public void addIndexLabel(SchemaLabel baseLabel, IndexLabel indexLabel) { + /* + * Create index and update index name in base-label(VL/EL) + * TODO: should wrap update base-label and create index in one tx. 
+ */ + this.addSchema(indexLabel); + + if (baseLabel.equals(VertexLabel.OLAP_VL)) { + return; + } + + this.updateSchema(baseLabel, schema -> { + // NOTE: Do schema update in the lock block + baseLabel.addIndexLabel(indexLabel.id()); + }); + } + + @Watched(prefix = "schema") + public void updateIndexLabel(IndexLabel indexLabel) { + this.updateSchema(indexLabel, null); + } + + @Watched(prefix = "schema") + public IndexLabel getIndexLabel(Id id) { + E.checkArgumentNotNull(id, "Index label id can't be null"); + return this.getSchema(HugeType.INDEX_LABEL, id); + } + + @Watched(prefix = "schema") + public IndexLabel getIndexLabel(String name) { + E.checkArgumentNotNull(name, "Index label name can't be null"); + E.checkArgument(!name.isEmpty(), "Index label name can't be empty"); + return this.getSchema(HugeType.INDEX_LABEL, name); + } + + @Override + public void close() { + + } + + @Watched(prefix = "schema") + public Id removeIndexLabel(Id id) { + LOG.debug("SchemaTransaction remove index label '{}'", id); + SchemaJob job = new IndexLabelRemoveJob(); + IndexLabel schema = this.getIndexLabel(id); + return asyncRun(this.graph(), schema, job); + } + + // 通用性 的schema处理函数 + @Watched(prefix = "schema") + public void updateSchemaStatus(SchemaElement schema, SchemaStatus status) { + if (!this.existsSchemaId(schema.type(), schema.id())) { + LOG.warn("Can't update schema '{}', it may be deleted", schema); + return; + } + + this.updateSchema(schema, schemaToUpdate -> { + // NOTE: Do schema update in the lock block + schema.status(status); + }); + } + + @Watched(prefix = "schema") + public boolean existsSchemaId(HugeType type, Id id) { + return this.getSchema(type, id) != null; + } + + @Override + public void removeIndexLabelFromBaseLabel(IndexLabel indexLabel) { + HugeType baseType = indexLabel.baseType(); + Id baseValue = indexLabel.baseValue(); + SchemaLabel baseLabel; + if (baseType == HugeType.VERTEX_LABEL) { + baseLabel = this.getVertexLabel(baseValue); + } else { + assert 
baseType == HugeType.EDGE_LABEL; + baseLabel = this.getEdgeLabel(baseValue); + } + + if (baseLabel == null) { + LOG.info("The base label '{}' of index label '{}' " + + "may be deleted before", baseValue, indexLabel); + return; + } + if (baseLabel.equals(VertexLabel.OLAP_VL)) { + return; + } + + this.updateSchema(baseLabel, schema -> { + // NOTE: Do schema update in the lock block + baseLabel.removeIndexLabel(indexLabel.id()); + }); + } + + protected void updateSchema(SchemaElement schema, + Consumer updateCallback) { + LOG.debug("SchemaTransaction update {} with id '{}'", + schema.type(), schema.id()); + this.saveSchema(schema, true, updateCallback); + } + + protected void addSchema(SchemaElement schema) { + LOG.debug("SchemaTransaction add {} with id '{}'", + schema.type(), schema.id()); + setCreateTimeIfNeeded(schema); + this.saveSchema(schema, false, null); + } + + @SuppressWarnings("unchecked") + private void saveSchema(SchemaElement schema, boolean update, + Consumer updateCallback) { + // Lock for schema update + // TODO: uncomment later - graph space + //String spaceGraph = this.graphParams() + // .graph().spaceGraphName(); + LockUtil.Locks locks = new LockUtil.Locks(graph); + try { + locks.lockWrites(LockUtil.hugeType2Group(schema.type()), schema.id()); + + if (updateCallback != null) { + // NOTE: Do schema update in the lock block + updateCallback.accept(schema); + } + // 调对应的方法 + switch (schema.type()) { + case PROPERTY_KEY: + this.schemaMetaManager.addPropertyKey(this.graphSpace, + this.graph, + (PropertyKey) schema); + break; + case VERTEX_LABEL: + this.schemaMetaManager.addVertexLabel(this.graphSpace, + this.graph, + (VertexLabel) schema); + // 点的label发生变化, 清空对应图的点缓存信息 + MetaManager.instance().notifyGraphVertexCacheClear(this.graphSpace, this.graph); + break; + case EDGE_LABEL: + this.schemaMetaManager.addEdgeLabel(this.graphSpace, + this.graph, + (EdgeLabel) schema); + // 边的label发生变化, 清空对应图的边缓存信息 + 
MetaManager.instance().notifyGraphEdgeCacheClear(this.graphSpace, this.graph); + break; + case INDEX_LABEL: + this.schemaMetaManager.addIndexLabel(this.graphSpace, + this.graph, + (IndexLabel) schema); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for saveSchema", schema.type())); + } + } finally { + locks.unlock(); + } + } + + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, Id id) { + LOG.debug("SchemaTransaction get {} by id '{}'", + type.readableName(), id); + switch (type) { + case PROPERTY_KEY: + return (T) this.schemaMetaManager.getPropertyKey(this.graphSpace, + this.graph, id); + case VERTEX_LABEL: + return (T) this.schemaMetaManager.getVertexLabel(this.graphSpace, + this.graph, id); + case EDGE_LABEL: + return (T) this.schemaMetaManager.getEdgeLabel(this.graphSpace, + this.graph, id); + case INDEX_LABEL: + return (T) this.schemaMetaManager.getIndexLabel(this.graphSpace, + this.graph, id); + default: + throw new AssertionError(String.format( + "Invalid type '%s' for getSchema", type)); + } + } + + /** + * Currently doesn't allow to exist schema with the same name + * + * @param type the query schema type + * @param name the query schema name + * @param SubClass of SchemaElement + * @return the queried schema object + */ + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, String name) { + LOG.debug("SchemaTransaction get {} by name '{}'", + type.readableName(), name); + switch (type) { + case PROPERTY_KEY: + return (T) this.schemaMetaManager.getPropertyKey(this.graphSpace, + this.graph, name); + case VERTEX_LABEL: + return (T) this.schemaMetaManager.getVertexLabel(this.graphSpace, + this.graph, name); + case EDGE_LABEL: + return (T) this.schemaMetaManager.getEdgeLabel(this.graphSpace, + this.graph, name); + case INDEX_LABEL: + return (T) this.schemaMetaManager.getIndexLabel(this.graphSpace, + this.graph, name); + default: + throw new AssertionError(String.format( + "Invalid type 
'%s' for getSchema", type)); + } + } + + @SuppressWarnings("unchecked") + protected List getAllSchema(HugeType type) { + LOG.debug("SchemaTransaction getAllSchema {}", type.readableName()); + switch (type) { + case PROPERTY_KEY: + return (List) this.schemaMetaManager.getPropertyKeys(this.graphSpace, + this.graph); + case VERTEX_LABEL: + return (List) this.schemaMetaManager.getVertexLabels(this.graphSpace, + this.graph); + case EDGE_LABEL: + return (List) this.schemaMetaManager.getEdgeLabels(this.graphSpace, this.graph); + case INDEX_LABEL: + return (List) this.schemaMetaManager.getIndexLabels(this.graphSpace, this.graph); + default: + throw new AssertionError(String.format( + "Invalid type '%s' for getSchema", type)); + } + } + + @Override + public void removeSchema(SchemaElement schema) { + LOG.debug("SchemaTransaction remove {} by id '{}'", + schema.type(), schema.id()); + // TODO: uncomment later - graph space + //String spaceGraph = this.graphParams() + // .graph().spaceGraphName(); + LockUtil.Locks locks = new LockUtil.Locks(graph); + try { + locks.lockWrites(LockUtil.hugeType2Group(schema.type()), + schema.id()); + switch (schema.type()) { + case PROPERTY_KEY: + this.schemaMetaManager.removePropertyKey(this.graphSpace, this.graph, + schema.id()); + break; + case VERTEX_LABEL: + this.schemaMetaManager.removeVertexLabel(this.graphSpace, this.graph, + schema.id()); + break; + case EDGE_LABEL: + this.schemaMetaManager.removeEdgeLabel(this.graphSpace, this.graph, + schema.id()); + break; + case INDEX_LABEL: + this.schemaMetaManager.removeIndexLabel(this.graphSpace, this.graph, + schema.id()); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for saveSchema", schema.type())); + } + } finally { + locks.unlock(); + } + } + + // olap 相关的方法 + public void createIndexLabelForOlapPk(PropertyKey propertyKey) { + WriteType writeType = propertyKey.writeType(); + if (writeType == WriteType.OLTP || + writeType == WriteType.OLAP_COMMON) { + 
return; + } + + String indexName = SchemaElement.OLAP + "_by_" + propertyKey.name(); + IndexLabel.Builder builder = this.graph().schema() + .indexLabel(indexName) + .onV(SchemaElement.OLAP) + .by(propertyKey.name()); + if (propertyKey.writeType() == WriteType.OLAP_SECONDARY) { + builder.secondary(); + } else { + assert propertyKey.writeType() == WriteType.OLAP_RANGE; + builder.range(); + } + builder.build(); + this.graph().addIndexLabel(VertexLabel.OLAP_VL, builder.build()); + } + + public Id removeOlapPk(PropertyKey propertyKey) { + LOG.debug("SchemaTransaction remove olap property key {} with id '{}'", + propertyKey.name(), propertyKey.id()); + SchemaJob job = new OlapPropertyKeyRemoveJob(); + return asyncRun(this.graph(), propertyKey, job); + } + + public void removeOlapPk(Id id) { + this.graphParams().loadGraphStore().removeOlapTable(id); + } + + public Id clearOlapPk(PropertyKey propertyKey) { + LOG.debug("SchemaTransaction clear olap property key {} with id '{}'", + propertyKey.name(), propertyKey.id()); + SchemaJob job = new OlapPropertyKeyClearJob(); + return asyncRun(this.graph(), propertyKey, job); + } + + public void clearOlapPk(Id id) { + this.graphParams().loadGraphStore().clearOlapTable(id); + } + + public Id createOlapPk(PropertyKey propertyKey) { + LOG.debug("SchemaTransaction create olap property key {} with id '{}'", + propertyKey.name(), propertyKey.id()); + SchemaJob job = new OlapPropertyKeyCreateJob(); + return asyncRun(this.graph(), propertyKey, job); + } + + // -- store 相关的方法,分为两类:1、olaptable相关 2、id生成策略 + // - 1、olaptable相关 + public void createOlapPk(Id id) { + this.graphParams().loadGraphStore().createOlapTable(id); + } + + public boolean existOlapTable(Id id) { + return this.graphParams().loadGraphStore().existOlapTable(id); + } + + public void initAndRegisterOlapTables() { + for (PropertyKey pk : this.getPropertyKeys()) { + if (pk.olap()) { + this.graphParams().loadGraphStore().checkAndRegisterOlapTable(pk.id()); + } + } + } + + // - 
2、id生成策略 + @Watched(prefix = "schema") + public Id getNextId(HugeType type) { + LOG.debug("SchemaTransaction get next id for {}", type); + return this.idCounter.nextId(type); + } + + @Watched(prefix = "schema") + public void setNextIdLowest(HugeType type, long lowest) { + LOG.debug("SchemaTransaction set next id to {} for {}", lowest, type); + this.idCounter.setCounterLowest(type, lowest); + } + + @Watched(prefix = "schema") + public Id getNextSystemId() { + LOG.debug("SchemaTransaction get next system id"); + Id id = this.idCounter.nextId(HugeType.SYS_SCHEMA); + return IdGenerator.of(-id.asLong()); + } + + @Watched(prefix = "schema") + public Id validOrGenerateId(HugeType type, Id id, String name) { + boolean forSystem = Graph.Hidden.isHidden(name); + if (id != null) { + this.checkIdAndUpdateNextId(type, id, name, forSystem); + } else { + if (forSystem) { + id = this.getNextSystemId(); + } else { + id = this.getNextId(type); + } + } + return id; + } + + private void checkIdAndUpdateNextId(HugeType type, Id id, + String name, boolean forSystem) { + if (forSystem) { + if (id.number() && id.asLong() < 0) { + return; + } + throw new IllegalStateException(String.format( + "Invalid system id '%s'", id)); + } + E.checkState(id.number() && id.asLong() > 0L, + "Schema id must be number and >0, but got '%s'", id); + GraphMode mode = this.graphMode(); + E.checkState(mode == GraphMode.RESTORING, + "Can't build schema with provided id '%s' " + + "when graph '%s' in mode '%s'", id, this.graph, mode); + this.setNextIdLowest(type, id.asLong()); + } + + // 功能型函数 + public void checkSchemaName(String name) { + String illegalReg = this.graphParams().configuration() + .get(CoreOptions.SCHEMA_ILLEGAL_NAME_REGEX); + E.checkNotNull(name, "name"); + E.checkArgument(!name.isEmpty(), "The name can't be empty."); + E.checkArgument(name.length() < 256, + "The length of name must less than 256 bytes."); + E.checkArgument(!name.matches(illegalReg), + "Illegal schema name '%s'", name); + + final 
char[] filters = {'#', '>', ':', '!'}; + for (char c : filters) { + E.checkArgument(name.indexOf(c) == -1, + "The name can't contain character '%s'.", c); + } + } + + @Override + public String graphName() { + return this.graph; + } + + protected HugeGraphParams graphParams() { + return this.graphParams; + } + + public GraphMode graphMode() { + return this.graphParams().mode(); + } + + // 获取字段的方法 + public HugeGraph graph() { + return this.graphParams.graph(); + } + + // 重建索引 + @Watched(prefix = "schema") + public Id rebuildIndex(SchemaElement schema) { + return this.rebuildIndex(schema, ImmutableSet.of()); + } + + @Watched(prefix = "schema") + public Id rebuildIndex(SchemaElement schema, Set dependencies) { + LOG.debug("SchemaTransaction rebuild index for {} with id '{}'", + schema.type(), schema.id()); + SchemaJob job = new IndexLabelRebuildJob(); + return asyncRun(this.graph(), schema, job, dependencies); + } + + /** + * 清除所有的schema信息 + */ + public void clear() { + this.schemaMetaManager.clearAllSchema(this.graphSpace, graph); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java index f3f9599b0b..e8d3461e34 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.type.define.SchemaStatus; @@ -44,7 +45,7 @@ public Object execute() { private static void removeEdgeLabel(HugeGraphParams 
graph, Id id) { GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); EdgeLabel edgeLabel = schemaTx.getEdgeLabel(id); // If the edge label does not exist, return directly if (edgeLabel == null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java index 2158c10c34..5e4acf92e9 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.IndexLabel; @@ -83,7 +84,7 @@ private void rebuildIndex(SchemaElement schema) { } private void rebuildIndex(SchemaLabel label, Collection indexLabelIds) { - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); GraphTransaction graphTx = this.params().graphTransaction(); Consumer indexUpdater = (elem) -> { @@ -148,7 +149,7 @@ private void rebuildIndex(SchemaLabel label, Collection indexLabelIds) { } private void removeIndex(Collection indexLabelIds) { - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); GraphTransaction graphTx = this.params().graphTransaction(); for (Id id : indexLabelIds) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java index 418eb6116f..b87978d8b2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java @@ -20,6 +20,7 @@ import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.type.define.SchemaStatus; @@ -40,7 +41,7 @@ public Object execute() { protected static void removeIndexLabel(HugeGraphParams graph, Id id) { GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); IndexLabel indexLabel = schemaTx.getIndexLabel(id); // If the index label does not exist, return directly if (indexLabel == null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java index bb740a3c05..155a6b8bfa 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java @@ -20,6 +20,7 @@ import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.IndexLabel; import 
org.apache.hugegraph.type.define.SchemaStatus; @@ -50,7 +51,7 @@ protected static void clearIndexLabel(HugeGraphParams graph, Id id) { return; } GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); IndexLabel indexLabel = schemaTx.getIndexLabel(olapIndexLabel); // If the index label does not exist, return directly if (indexLabel == null) { @@ -80,7 +81,7 @@ protected static void clearIndexLabel(HugeGraphParams graph, Id id) { } protected static Id findOlapIndexLabel(HugeGraphParams graph, Id olap) { - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); for (IndexLabel indexLabel : schemaTx.getIndexLabels()) { if (indexLabel.indexFields().contains(olap)) { return indexLabel.id(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java index bb2314ca04..272ebbc3e5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java @@ -17,6 +17,7 @@ package org.apache.hugegraph.job.schema; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; @@ -29,7 +30,7 @@ public String type() { @Override public Object execute() { - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); PropertyKey propertyKey = schemaTx.getPropertyKey(this.schemaId()); // Create olap index label schema schemaTx.createIndexLabelForOlapPk(propertyKey); diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java index 678e2249ba..200ef51c37 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java @@ -18,6 +18,7 @@ package org.apache.hugegraph.job.schema; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; @@ -42,7 +43,7 @@ public Object execute() { } // Remove olap property key - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); PropertyKey propertyKey = schemaTx.getPropertyKey(olap); removeSchema(schemaTx, propertyKey); return null; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java index 2f553635ac..3165351396 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java @@ -22,7 +22,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.hugegraph.backend.tx.SchemaTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.job.SysJob; import org.apache.hugegraph.schema.SchemaElement; import org.apache.hugegraph.type.HugeType; @@ -85,10 +85,10 @@ public static String formatTaskName(HugeType type, Id id, String name) { * @param tx The remove operation actual 
executer * @param schema the schema to be removed */ - protected static void removeSchema(SchemaTransaction tx, + protected static void removeSchema(ISchemaTransaction tx, SchemaElement schema) { try { - Method method = SchemaTransaction.class + Method method = ISchemaTransaction.class .getDeclaredMethod("removeSchema", SchemaElement.class); method.setAccessible(true); @@ -108,10 +108,10 @@ protected static void removeSchema(SchemaTransaction tx, * @param tx The update operation actual execute * @param schema the schema to be updated */ - protected static void updateSchema(SchemaTransaction tx, + protected static void updateSchema(ISchemaTransaction tx, SchemaElement schema) { try { - Method method = SchemaTransaction.class + Method method = ISchemaTransaction.class .getDeclaredMethod("updateSchema", SchemaElement.class); method.setAccessible(true); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java index f00f316ab9..86c059e8f7 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.HugeGraphParams; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.VertexLabel; @@ -47,7 +48,7 @@ public Object execute() { private static void removeVertexLabel(HugeGraphParams graph, Id id) { GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); 
VertexLabel vertexLabel = schemaTx.getVertexLabel(id); // If the vertex label does not exist, return directly if (vertexLabel == null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardClusterRoleStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardClusterRoleStore.java index 4885410c7d..fa1cc6a617 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardClusterRoleStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardClusterRoleStore.java @@ -146,7 +146,12 @@ private ClusterRole from(Vertex vertex) { private Optional queryVertex() { GraphTransaction tx = this.graph.systemTransaction(); - ConditionQuery query = new ConditionQuery(HugeType.VERTEX); + ConditionQuery query; + if (this.graph.backendStoreFeatures().supportsTaskAndServerVertex()) { + query = new ConditionQuery(HugeType.SERVER); + } else { + query = new ConditionQuery(HugeType.VERTEX); + } VertexLabel vl = this.graph.graph().vertexLabel(P.ROLE_DATA); query.eq(HugeKeys.LABEL, vl.id()); query.query(Condition.eq(vl.primaryKeys().get(0), "default")); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java new file mode 100644 index 0000000000..8c9600e6b9 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java @@ -0,0 +1,322 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta; + +import java.io.File; +import java.net.URI; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.function.Consumer; + +import org.apache.commons.io.FileUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.meta.lock.EtcdDistributedLock; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.collection.CollectionFactory; + +import com.google.common.base.Strings; + +import io.etcd.jetcd.ByteSequence; +import io.etcd.jetcd.Client; +import io.etcd.jetcd.ClientBuilder; +import io.etcd.jetcd.KV; +import io.etcd.jetcd.KeyValue; +import io.etcd.jetcd.kv.GetResponse; +import io.etcd.jetcd.lease.LeaseKeepAliveResponse; +import io.etcd.jetcd.options.DeleteOption; +import io.etcd.jetcd.options.GetOption; +import io.etcd.jetcd.options.WatchOption; +import io.etcd.jetcd.watch.WatchEvent; +import io.etcd.jetcd.watch.WatchResponse; +import io.netty.handler.ssl.ApplicationProtocolConfig; +import io.netty.handler.ssl.ApplicationProtocolNames; +import io.netty.handler.ssl.SslContext; +import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.SslProvider; + +public class EtcdMetaDriver implements MetaDriver { + + private final Client client; + private final EtcdDistributedLock lock; + + public 
EtcdMetaDriver(String trustFile, String clientCertFile, + String clientKeyFile, Object... endpoints) { + ClientBuilder builder = this.etcdMetaDriverBuilder(endpoints); + + SslContext sslContext = openSslContext(trustFile, clientCertFile, + clientKeyFile); + this.client = builder.sslContext(sslContext).build(); + this.lock = EtcdDistributedLock.getInstance(this.client); + } + + public EtcdMetaDriver(Object... endpoints) { + ClientBuilder builder = this.etcdMetaDriverBuilder(endpoints); + this.client = builder.build(); + this.lock = EtcdDistributedLock.getInstance(this.client); + } + + private static ByteSequence toByteSequence(String content) { + return ByteSequence.from(content.getBytes()); + } + + private static boolean isEtcdPut(WatchEvent event) { + return event.getEventType() == WatchEvent.EventType.PUT; + } + + public static SslContext openSslContext(String trustFile, + String clientCertFile, + String clientKeyFile) { + SslContext ssl; + try { + File trustManagerFile = FileUtils.getFile(trustFile); + File keyCertChainFile = FileUtils.getFile(clientCertFile); + File KeyFile = FileUtils.getFile(clientKeyFile); + ApplicationProtocolConfig alpn = new ApplicationProtocolConfig( + ApplicationProtocolConfig.Protocol.ALPN, + ApplicationProtocolConfig.SelectorFailureBehavior.NO_ADVERTISE, + + ApplicationProtocolConfig.SelectedListenerFailureBehavior + .ACCEPT, + ApplicationProtocolNames.HTTP_2); + + ssl = SslContextBuilder.forClient() + .applicationProtocolConfig(alpn) + .sslProvider(SslProvider.OPENSSL) + .trustManager(trustManagerFile) + .keyManager(keyCertChainFile, KeyFile) + .build(); + } catch (Exception e) { + throw new HugeException("Failed to open ssl context", e); + } + return ssl; + } + + public ClientBuilder etcdMetaDriverBuilder(Object... 
endpoints) { + int length = endpoints.length; + ClientBuilder builder = null; + if (endpoints[0] instanceof List && endpoints.length == 1) { + builder = Client.builder() + .endpoints(((List) endpoints[0]) + .toArray(new String[0])); + } else if (endpoints[0] instanceof String) { + for (int i = 1; i < length; i++) { + E.checkArgument(endpoints[i] instanceof String, + "Inconsistent endpoint %s(%s) with %s(%s)", + endpoints[i], endpoints[i].getClass(), + endpoints[0], endpoints[0].getClass()); + } + builder = Client.builder().endpoints((String[]) endpoints); + } else if (endpoints[0] instanceof URI) { + for (int i = 1; i < length; i++) { + E.checkArgument(endpoints[i] instanceof String, + "Invalid endpoint %s(%s)", + endpoints[i], endpoints[i].getClass(), + endpoints[0], endpoints[0].getClass()); + } + builder = Client.builder().endpoints((URI[]) endpoints); + } else { + E.checkArgument(false, "Invalid endpoint %s(%s)", + endpoints[0], endpoints[0].getClass()); + } + return builder; + } + + @Override + public long keepAlive(String key, long leaseId) { + try { + LeaseKeepAliveResponse response = + this.client.getLeaseClient().keepAliveOnce(leaseId).get(); + return response.getID(); + } catch (InterruptedException | ExecutionException e) { + // keepAlive once Failed + return 0; + } + } + + @Override + public String get(String key) { + List keyValues; + KV kvClient = this.client.getKVClient(); + try { + keyValues = kvClient.get(toByteSequence(key)) + .get().getKvs(); + } catch (InterruptedException | ExecutionException e) { + throw new HugeException("Failed to get key '%s' from etcd", e, key); + } + + if (!keyValues.isEmpty()) { + return keyValues.get(0).getValue().toString(Charset.defaultCharset()); + } + + return null; + } + + @Override + public void put(String key, String value) { + KV kvClient = this.client.getKVClient(); + try { + kvClient.put(toByteSequence(key), toByteSequence(value)).get(); + } catch (InterruptedException | ExecutionException e) { + try { + 
kvClient.delete(toByteSequence(key)).get(); + } catch (Throwable t) { + throw new HugeException("Failed to put '%s:%s' to etcd", + e, key, value); + } + } + } + + @Override + public void delete(String key) { + KV kvClient = this.client.getKVClient(); + try { + kvClient.delete(toByteSequence(key)).get(); + } catch (InterruptedException | ExecutionException e) { + throw new HugeException( + "Failed to delete key '%s' from etcd", e, key); + } + } + + @Override + public void deleteWithPrefix(String prefix) { + KV kvClient = this.client.getKVClient(); + try { + DeleteOption option = DeleteOption.newBuilder() + .isPrefix(true) + .build(); + kvClient.delete(toByteSequence(prefix), option); + } catch (Throwable e) { + throw new HugeException( + "Failed to delete prefix '%s' from etcd", e, prefix); + } + } + + @Override + public Map scanWithPrefix(String prefix) { + GetOption getOption = GetOption.newBuilder() + .isPrefix(true) + .build(); + GetResponse response; + try { + response = this.client.getKVClient().get(toByteSequence(prefix), + getOption).get(); + } catch (InterruptedException | ExecutionException e) { + throw new HugeException("Failed to scan etcd with prefix '%s'", + e, prefix); + } + int size = (int) response.getCount(); + Map keyValues = CollectionFactory.newMap( + CollectionType.JCF, size); + for (KeyValue kv : response.getKvs()) { + String key = kv.getKey().toString(Charset.defaultCharset()); + String value = kv.getValue().isEmpty() ? 
"" : + kv.getValue().toString(Charset.defaultCharset()); + keyValues.put(key, value); + } + return keyValues; + } + + @Override + public List extractValuesFromResponse(T response) { + List values = new ArrayList<>(); + E.checkArgument(response instanceof WatchResponse, + "Invalid response type %s", response.getClass()); + for (WatchEvent event : ((WatchResponse) response).getEvents()) { + // Skip if not etcd PUT event + if (!isEtcdPut(event)) { + return null; + } + + String value = event.getKeyValue().getValue() + .toString(Charset.defaultCharset()); + values.add(value); + } + return values; + } + + @Override + public Map extractKVFromResponse(T response) { + E.checkArgument(response instanceof WatchResponse, + "Invalid response type %s", response.getClass()); + + Map resultMap = new HashMap<>(); + for (WatchEvent event : ((WatchResponse) response).getEvents()) { + // Skip if not etcd PUT event + if (!isEtcdPut(event)) { + continue; + } + + String key = event.getKeyValue().getKey().toString(Charset.defaultCharset()); + String value = event.getKeyValue().getValue() + .toString(Charset.defaultCharset()); + if (Strings.isNullOrEmpty(key)) { + continue; + } + resultMap.put(key, value); + } + return resultMap; + } + + @Override + public LockResult tryLock(String key, long ttl, long timeout) { + return this.lock.tryLock(key, ttl, timeout); + } + + @Override + public boolean isLocked(String key) { + try { + long size = this.client.getKVClient().get(toByteSequence(key)) + .get().getCount(); + + return size > 0; + } catch (InterruptedException | ExecutionException e) { + throw new HugeException("Failed to check is locked '%s'", e, key); + } + } + + @Override + public void unlock(String key, LockResult lockResult) { + this.lock.unLock(key, lockResult); + } + + @SuppressWarnings("unchecked") + @Override + public void listen(String key, Consumer consumer) { + + this.client.getWatchClient().watch(toByteSequence(key), + (Consumer) consumer); + } + + /** + * Listen etcd key with 
prefix + */ + @SuppressWarnings("unchecked") + @Override + public void listenPrefix(String prefix, Consumer consumer) { + ByteSequence sequence = toByteSequence(prefix); + WatchOption option = WatchOption.newBuilder().isPrefix(true).build(); + this.client.getWatchClient().watch(sequence, option, (Consumer) consumer); + + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java new file mode 100644 index 0000000000..2d0936498f --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta; + +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.hugegraph.meta.lock.LockResult; + +public interface MetaDriver { + + public void put(String key, String value); + + public String get(String key); + + public void delete(String key); + + public void deleteWithPrefix(String prefix); + + public Map scanWithPrefix(String prefix); + + public void listen(String key, Consumer consumer); + + public void listenPrefix(String prefix, Consumer consumer); + + public List extractValuesFromResponse(T response); + + /** + * Extract K-V pairs of response + * + * @param + * @param response + * @return + */ + public Map extractKVFromResponse(T response); + + public LockResult tryLock(String key, long ttl, long timeout); + + /** + * return if the key is Locked. + * + * @param key + * @return bool + */ + public boolean isLocked(String key); + + public void unlock(String key, LockResult lockResult); + + /** + * keepAlive of current lease + * + * @param key + * @param lease + * @return next leaseId + */ + public long keepAlive(String key, long lease); +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java new file mode 100644 index 0000000000..4824b5fb44 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java @@ -0,0 +1,1297 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.auth.HugeAccess; +import org.apache.hugegraph.auth.HugeBelong; +import org.apache.hugegraph.auth.HugeGroup; +import org.apache.hugegraph.auth.HugePermission; +import org.apache.hugegraph.auth.HugeRole; +import org.apache.hugegraph.auth.HugeTarget; +import org.apache.hugegraph.auth.HugeUser; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.meta.managers.AuthMetaManager; +import org.apache.hugegraph.meta.managers.ConfigMetaManager; +import org.apache.hugegraph.meta.managers.GraphMetaManager; +import org.apache.hugegraph.meta.managers.KafkaMetaManager; +import org.apache.hugegraph.meta.managers.LockMetaManager; +import org.apache.hugegraph.meta.managers.SchemaMetaManager; +import org.apache.hugegraph.meta.managers.SchemaTemplateMetaManager; +import org.apache.hugegraph.meta.managers.ServiceMetaManager; +import org.apache.hugegraph.meta.managers.SpaceMetaManager; +import org.apache.hugegraph.meta.managers.TaskMetaManager; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import 
org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.space.GraphSpace; +import org.apache.hugegraph.space.SchemaTemplate; +import org.apache.hugegraph.space.Service; +import org.apache.hugegraph.util.E; + +import com.google.common.collect.ImmutableMap; + +public class MetaManager { + + public static final String META_PATH_DELIMITER = "/"; + public static final String META_PATH_JOIN = "-"; + + public static final String META_PATH_HUGEGRAPH = "HUGEGRAPH"; + public static final String META_PATH_GRAPHSPACE = "GRAPHSPACE"; + public static final String META_PATH_GRAPHSPACE_LIST = "GRAPHSPACE_LIST"; + public static final String META_PATH_SERVICE = "SERVICE"; + public static final String META_PATH_SERVICE_CONF = "SERVICE_CONF"; + public static final String META_PATH_GRAPH_CONF = "GRAPH_CONF"; + public static final String META_PATH_CONF = "CONF"; + public static final String META_PATH_GRAPH = "GRAPH"; + public static final String META_PATH_SCHEMA = "SCHEMA"; + public static final String META_PATH_PROPERTY_KEY = "PROPERTY_KEY"; + public static final String META_PATH_VERTEX_LABEL = "VERTEX_LABEL"; + public static final String META_PATH_EDGE_LABEL = "EDGE_LABEL"; + public static final String META_PATH_INDEX_LABEL = "INDEX_LABEL"; + public static final String META_PATH_NAME = "NAME"; + public static final String META_PATH_ID = "ID"; + public static final String META_PATH_AUTH = "AUTH"; + public static final String META_PATH_USER = "USER"; + public static final String META_PATH_GROUP = "GROUP"; + public static final String META_PATH_ROLE = "ROLE"; + public static final String META_PATH_TARGET = "TARGET"; + public static final String META_PATH_BELONG = "BELONG"; + public static final String META_PATH_ACCESS = "ACCESS"; + public static final String META_PATH_K8S_BINDINGS = "BINDING"; + public static final String META_PATH_REST_PROPERTIES = "REST_PROPERTIES"; + public static final String META_PATH_GREMLIN_YAML = 
"GREMLIN_YAML"; + public static final String META_PATH_SCHEMA_TEMPLATE = "SCHEMA_TEMPLATE"; + public static final String META_PATH_TASK = "TASK"; + public static final String META_PATH_TASK_LOCK = "TASK_LOCK"; + public static final String META_PATH_AUTH_EVENT = "AUTH_EVENT"; + public static final String META_PATH_EVENT = "EVENT"; + public static final String META_PATH_ADD = "ADD"; + public static final String META_PATH_REMOVE = "REMOVE"; + public static final String META_PATH_UPDATE = "UPDATE"; + public static final String META_PATH_CLEAR = "CLEAR"; + public static final String META_PATH_DDS = "DDS_HOST"; + public static final String META_PATH_METRICS = "METRICS"; + public static final String META_PATH_KAFKA = "KAFKA"; + public static final String META_PATH_HOST = "BROKER_HOST"; + public static final String META_PATH_PORT = "BROKER_PORT"; + public static final String META_PATH_PARTITION_COUNT = "PARTITION_COUNT"; + public static final String META_PATH_DATA_SYNC_ROLE = "DATA_SYNC_ROLE"; + public static final String META_PATH_SLAVE_SERVER_HOST = "SLAVE_SERVER_HOST"; + public static final String META_PATH_SLAVE_SERVER_PORT = "SLAVE_SERVER_PORT"; + public static final String META_PATH_SYNC_BROKER = "SYNC_BROKER"; + public static final String META_PATH_SYNC_STORAGE = "SYNC_STORAGE"; + public static final String META_PATH_KAFKA_FILTER = "KAFKA-FILTER"; + public static final String META_PATH_WHITE_IP_LIST = "WHITE_IP_LIST"; + public static final String META_PATH_WHITE_IP_STATUS = "WHITE_IP_STATUS"; + public static final long LOCK_DEFAULT_LEASE = 30L; + public static final long LOCK_DEFAULT_TIMEOUT = 10L; + public static final int RANDOM_USER_ID = 100; + private static final String META_PATH_URLS = "URLS"; + private static final String META_PATH_PD_PEERS = "HSTORE_PD_PEERS"; + private static final MetaManager INSTANCE = new MetaManager(); + private MetaDriver metaDriver; + private String cluster; + private AuthMetaManager authMetaManager; + private GraphMetaManager 
graphMetaManager; + private SchemaMetaManager schemaMetaManager; + private ServiceMetaManager serviceMetaManager; + private SpaceMetaManager spaceMetaManager; + private TaskMetaManager taskMetaManager; + private ConfigMetaManager configMetaManager; + private KafkaMetaManager kafkaMetaManager; + private SchemaTemplateMetaManager schemaTemplateManager; + private LockMetaManager lockMetaManager; + + private MetaManager() { + } + + public static MetaManager instance() { + return INSTANCE; + } + + public synchronized boolean isReady() { + return null != this.metaDriver; + } + + public String cluster() { + return this.cluster; + } + + public synchronized void connect(String cluster, MetaDriverType type, + String trustFile, String clientCertFile, + String clientKeyFile, Object... args) { + E.checkArgument(cluster != null && !cluster.isEmpty(), + "The cluster can't be null or empty"); + if (this.metaDriver == null) { + this.cluster = cluster; + + switch (type) { + case ETCD: + this.metaDriver = trustFile == null || trustFile.isEmpty() ? 
+ new EtcdMetaDriver(args) : + new EtcdMetaDriver(trustFile, + clientCertFile, + clientKeyFile, args); + break; + case PD: + assert args.length > 0; + // FIXME: assume pd.peers is urls separated by commas in a string + // like `127.0.0.1:8686,127.0.0.1:8687,127.0.0.1:8688` + this.metaDriver = new PdMetaDriver((String) args[0]); + break; + default: + throw new AssertionError(String.format( + "Invalid meta driver type: %s", type)); + } + } + this.initManagers(this.cluster); + } + + private void initManagers(String cluster) { + this.authMetaManager = new AuthMetaManager(this.metaDriver, cluster); + this.graphMetaManager = new GraphMetaManager(this.metaDriver, cluster); + this.schemaMetaManager = new SchemaMetaManager(this.metaDriver, cluster, null); + this.serviceMetaManager = new ServiceMetaManager(this.metaDriver, cluster); + this.spaceMetaManager = new SpaceMetaManager(this.metaDriver, cluster); + this.taskMetaManager = new TaskMetaManager(this.metaDriver, cluster); + this.configMetaManager = new ConfigMetaManager(this.metaDriver, cluster); + this.kafkaMetaManager = new KafkaMetaManager(this.metaDriver, cluster); + this.schemaTemplateManager = new SchemaTemplateMetaManager(this.metaDriver, cluster); + this.lockMetaManager = new LockMetaManager(this.metaDriver, cluster); + } + + public void listenGraphSpaceAdd(Consumer consumer) { + this.spaceMetaManager.listenGraphSpaceAdd(consumer); + } + + public void listenGraphSpaceRemove(Consumer consumer) { + this.spaceMetaManager.listenGraphSpaceRemove(consumer); + } + + public void listenGraphSpaceUpdate(Consumer consumer) { + this.spaceMetaManager.listenGraphSpaceUpdate(consumer); + } + + public void notifyGraphSpaceAdd(String graphSpace) { + this.spaceMetaManager.notifyGraphSpaceAdd(graphSpace); + } + + public void notifyGraphSpaceRemove(String graphSpace) { + this.spaceMetaManager.notifyGraphSpaceRemove(graphSpace); + } + + public void notifyGraphSpaceUpdate(String graphSpace) { + 
this.spaceMetaManager.notifyGraphSpaceUpdate(graphSpace); + } + + public void listenServiceAdd(Consumer consumer) { + this.serviceMetaManager.listenServiceAdd(consumer); + } + + public void listenServiceRemove(Consumer consumer) { + this.serviceMetaManager.listenServiceRemove(consumer); + } + + public void listenServiceUpdate(Consumer consumer) { + this.serviceMetaManager.listenServiceUpdate(consumer); + } + + public void listenGraphAdd(Consumer consumer) { + this.graphMetaManager.listenGraphAdd(consumer); + } + + public void listenGraphUpdate(Consumer consumer) { + this.graphMetaManager.listenGraphUpdate(consumer); + } + + public void listenGraphRemove(Consumer consumer) { + this.graphMetaManager.listenGraphRemove(consumer); + } + + public void listenGraphClear(Consumer consumer) { + this.graphMetaManager.listenGraphClear(consumer); + } + + public void listenSchemaCacheClear(Consumer consumer) { + this.graphMetaManager.listenSchemaCacheClear(consumer); + } + + public void listenGraphCacheClear(Consumer consumer) { + this.graphMetaManager.listenGraphCacheClear(consumer); + } + + /** + * 监听vertex label变化, graph vertex cache clear + * + * @param consumer + * @param + */ + public void listenGraphVertexCacheClear(Consumer consumer) { + this.graphMetaManager.listenGraphVertexCacheClear(consumer); + } + + /** + * 监听edge label变化, graph edge cache clear + * + * @param consumer + * @param + */ + public void listenGraphEdgeCacheClear(Consumer consumer) { + this.graphMetaManager.listenGraphEdgeCacheClear(consumer); + } + + public void listenRestPropertiesUpdate(String graphSpace, + String serviceId, + Consumer consumer) { + this.configMetaManager.listenRestPropertiesUpdate(graphSpace, + serviceId, + consumer); + } + + public void listenGremlinYamlUpdate(String graphSpace, + String serviceId, + Consumer consumer) { + this.configMetaManager.listenGremlinYamlUpdate(graphSpace, + serviceId, + consumer); + } + + public void listenAuthEvent(Consumer consumer) { + 
this.authMetaManager.listenAuthEvent(consumer); + } + + private void putAuthEvent(AuthEvent event) { + this.authMetaManager.putAuthEvent(event); + } + + public void listenKafkaConfig(Consumer consumer) { + this.kafkaMetaManager.listenKafkaConfig(consumer); + } + + public String kafkaGetRaw(String key) { + return this.kafkaMetaManager.getRaw(key); + } + + public void kafkaPutOrDeleteRaw(String key, String val) { + this.kafkaMetaManager.putOrDeleteRaw(key, val); + } + + public Map graphSpaceConfigs() { + return this.spaceMetaManager.graphSpaceConfigs(); + } + + public Map serviceConfigs(String graphSpace) { + return this.serviceMetaManager.serviceConfigs(graphSpace); + } + + public Map> graphConfigs(String graphSpace) { + return this.graphMetaManager.graphConfigs(graphSpace); + } + + public Set schemaTemplates(String graphSpace) { + return this.schemaTemplateManager.schemaTemplates(graphSpace); + } + + @SuppressWarnings("unchecked") + public SchemaTemplate schemaTemplate(String graphSpace, + String schemaTemplate) { + return this.schemaTemplateManager.schemaTemplate(graphSpace, + schemaTemplate); + } + + public void addSchemaTemplate(String graphSpace, SchemaTemplate template) { + this.schemaTemplateManager.addSchemaTemplate(graphSpace, template); + } + + public void updateSchemaTemplate(String graphSpace, + SchemaTemplate template) { + this.schemaTemplateManager.updateSchemaTemplate(graphSpace, template); + } + + public void removeSchemaTemplate(String graphSpace, String name) { + this.schemaTemplateManager.removeSchemaTemplate(graphSpace, name); + } + + public void clearSchemaTemplate(String graphSpace) { + this.schemaTemplateManager.clearSchemaTemplate(graphSpace); + } + + public String extractGraphSpaceFromKey(String key) { + String[] parts = key.split(META_PATH_DELIMITER); + if (parts.length < 4) { + return null; + } + if (parts[3].equals(META_PATH_CONF)) { + return parts.length < 5 ? 
null : parts[4]; + } + return parts[3]; + } + + public List extractGraphFromKey(String key) { + String[] parts = key.split(META_PATH_DELIMITER); + if (parts.length < 6) { + return Collections.EMPTY_LIST; + } + return Arrays.asList(parts[3], parts[5]); + } + + public List extractGraphSpacesFromResponse(T response) { + return this.metaDriver.extractValuesFromResponse(response); + } + + public List extractServicesFromResponse(T response) { + return this.metaDriver.extractValuesFromResponse(response); + } + + public List extractGraphsFromResponse(T response) { + return this.metaDriver.extractValuesFromResponse(response); + } + + public Map extractKVFromResponse(T response) { + return this.metaDriver.extractKVFromResponse(response); + } + + public GraphSpace getGraphSpaceConfig(String graphSpace) { + return this.spaceMetaManager.getGraphSpaceConfig(graphSpace); + } + + public String getServiceRawConfig(String graphSpace, String service) { + return this.serviceMetaManager.getServiceRawConfig(graphSpace, service); + } + + public Service parseServiceRawConfig(String serviceRawConf) { + return this.serviceMetaManager.parseServiceRawConfig(serviceRawConf); + } + + public Service getServiceConfig(String graphSpace, String service) { + return this.serviceMetaManager.getServiceConfig(graphSpace, service); + } + + public Map getGraphConfig(String graphSpace, String graph) { + return this.graphMetaManager.getGraphConfig(graphSpace, graph); + } + + public void addGraphConfig(String graphSpace, String graph, + Map configs) { + this.graphMetaManager.addGraphConfig(graphSpace, graph, configs); + } + + public void updateGraphConfig(String graphSpace, String graph, + Map configs) { + this.graphMetaManager.updateGraphConfig(graphSpace, graph, configs); + } + + public GraphSpace graphSpace(String name) { + return this.spaceMetaManager.graphSpace(name); + } + + public void addGraphSpaceConfig(String name, GraphSpace space) { + this.spaceMetaManager.addGraphSpaceConfig(name, space); + } + 
+ public void removeGraphSpaceConfig(String name) { + this.spaceMetaManager.removeGraphSpaceConfig(name); + } + + public void updateGraphSpaceConfig(String name, GraphSpace space) { + this.spaceMetaManager.updateGraphSpaceConfig(name, space); + } + + public void appendGraphSpaceList(String name) { + this.spaceMetaManager.appendGraphSpaceList(name); + } + + public void clearGraphSpaceList(String name) { + this.spaceMetaManager.clearGraphSpaceList(name); + } + + public void notifyServiceAdd(String graphSpace, String name) { + this.serviceMetaManager.notifyServiceAdd(graphSpace, name); + } + + public void notifyServiceRemove(String graphSpace, String name) { + this.serviceMetaManager.notifyServiceRemove(graphSpace, name); + } + + public void notifyServiceUpdate(String graphSpace, String name) { + this.serviceMetaManager.notifyServiceUpdate(graphSpace, name); + } + + public Service service(String graphSpace, String name) { + return this.serviceMetaManager.service(graphSpace, name); + } + + public void addServiceConfig(String graphSpace, Service service) { + this.serviceMetaManager.addServiceConfig(graphSpace, service); + } + + public void removeServiceConfig(String graphSpace, String service) { + this.serviceMetaManager.removeServiceConfig(graphSpace, service); + } + + public void updateServiceConfig(String graphSpace, Service service) { + this.addServiceConfig(graphSpace, service); + } + + public void removeGraphConfig(String graphSpace, String graph) { + this.graphMetaManager.removeGraphConfig(graphSpace, graph); + } + + public void notifyGraphAdd(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphAdd(graphSpace, graph); + } + + public void notifyGraphRemove(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphRemove(graphSpace, graph); + } + + public void notifyGraphUpdate(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphUpdate(graphSpace, graph); + } + + public void notifyGraphClear(String graphSpace, String 
graph) { + this.graphMetaManager.notifyGraphClear(graphSpace, graph); + } + + public void notifySchemaCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifySchemaCacheClear(graphSpace, graph); + } + + public void notifyGraphCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphCacheClear(graphSpace, graph); + } + + /** + * 通知 需要进行 graph vertex cache clear + * + * @param graphSpace + * @param graph + */ + public void notifyGraphVertexCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphVertexCacheClear(graphSpace, graph); + } + + /** + * 通知 需要进行 graph edge cache clear + * + * @param graphSpace + * @param graph + */ + public void notifyGraphEdgeCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphEdgeCacheClear(graphSpace, graph); + } + + public LockResult tryLock(String key) { + return this.lockMetaManager.tryLock(key); + } + + public void unlock(LockResult lockResult, String... keys) { + this.lockMetaManager.unlock(lockResult, keys); + } + + public void unlock(String key, LockResult lockResult) { + this.lockMetaManager.unlock(key, lockResult); + } + + public String belongId(String userName, String roleName) { + return this.authMetaManager.belongId(userName, roleName, HugeBelong.UR); + } + + public String belongId(String source, String target, String link) { + return this.authMetaManager.belongId(source, target, link); + } + + public String accessId(String roleName, String targetName, + HugePermission permission) { + return this.authMetaManager.accessId(roleName, targetName, permission); + } + + private String graphSpaceBindingsServer(String name, BindingType type) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/CONF/{graphspace} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + name, + META_PATH_K8S_BINDINGS, + type.name(), + META_PATH_URLS); + } + + /** + * Get DDS (eureka) host, format should be "ip:port", with no 
/ + * + * @return + */ + private String ddsHostKey() { + // HUGEGRAPH/{cluster}/DDS_HOST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_DDS); + } + + private String hugeClusterRoleKey() { + // HUGEGRAPH/{clusterRole}/KAFKA/DATA_SYNC_ROLE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_DATA_SYNC_ROLE); + } + + private String kafkaHostKey() { + // HUGEGRAPH/{cluster}/KAFKA/BROKER_HOST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_HOST); + } + + private String kafkaPortKey() { + // HUGEGRAPH/{cluster}/KAFKA/BROKER_PORT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_PORT); + } + + private String kafkaPartitionCountKey() { + // HUGEGRAPH/{cluster}/KAFKA/PARTITION_COUNT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_PARTITION_COUNT); + } + + private String kafkaSlaveHostKey() { + // HUGEGRAPH/{cluster}/KAFKA/SLAVE_SERVER_HOST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SLAVE_SERVER_HOST); + } + + private String kafkaSlavePortKey() { + // HUGEGRAPH/{cluster}/KAFKA/SLAVE_SERVER_PORT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SLAVE_SERVER_PORT); + } + + public String kafkaSyncBrokerKey() { + // HUGEGRAPH/{cluster}/KAFKA/SYNC_BROKER + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SYNC_BROKER); + } + + public String kafkaSyncStorageKey() { + // HUGEGRAPH/{cluster}/KAFKA/SYNC_STORAGE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SYNC_STORAGE); + } + + public String kafkaFilterGraphspaceKey() { + // 
HUGEGRAPH/{cluster}/KAFKA-FILTER/GRAPHSPACE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA_FILTER, + META_PATH_GRAPHSPACE); + } + + public String kafkaFilterGraphKey() { + // HUGEGRAPH/{cluster}/KAFKA-FILTER/FILTER/GRAPH + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA_FILTER, + META_PATH_GRAPH); + } + + private String whiteIpListKey() { + // HUGEGRAPH/{cluster}/WHITE_IP_LIST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_WHITE_IP_LIST); + } + + private String whiteIpStatusKey() { + // HUGEGRAPH/{cluster}/WHITE_IP_STATUS + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_WHITE_IP_STATUS); + } + + private String hstorePDPeersKey() { + // HUGEGRAPH/{cluster}/META_PATH_PD_PEERS + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_PD_PEERS); + } + + public Id addPropertyKey(String graphSpace, String graph, + PropertyKey propertyKey) { + this.schemaMetaManager.addPropertyKey(graphSpace, graph, propertyKey); + return IdGenerator.ZERO; + } + + public void updatePropertyKey(String graphSpace, String graph, + PropertyKey pkey) { + this.schemaMetaManager.updatePropertyKey(graphSpace, graph, pkey); + } + + public PropertyKey getPropertyKey(String graphSpace, String graph, + Id propertyKey) { + return this.schemaMetaManager.getPropertyKey(graphSpace, graph, + propertyKey); + } + + public PropertyKey getPropertyKey(String graphSpace, String graph, + String propertyKey) { + return this.schemaMetaManager.getPropertyKey(graphSpace, graph, + propertyKey); + } + + public List getPropertyKeys(String graphSpace, String graph) { + return this.schemaMetaManager.getPropertyKeys(graphSpace, graph); + } + + public Id removePropertyKey(String graphSpace, String graph, + Id propertyKey) { + return this.schemaMetaManager.removePropertyKey(graphSpace, graph, + 
propertyKey); + } + + public void addVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + this.schemaMetaManager.addVertexLabel(graphSpace, graph, vertexLabel); + } + + public void updateVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + this.schemaMetaManager.updateVertexLabel(graphSpace, graph, + vertexLabel); + } + + public VertexLabel getVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + return this.schemaMetaManager.getVertexLabel(graphSpace, graph, + vertexLabel); + } + + public VertexLabel getVertexLabel(String graphSpace, String graph, + String vertexLabel) { + return this.schemaMetaManager.getVertexLabel(graphSpace, graph, + vertexLabel); + } + + public List getVertexLabels(String graphSpace, String graph) { + return this.schemaMetaManager.getVertexLabels(graphSpace, graph); + } + + public Id removeVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + return this.schemaMetaManager.removeVertexLabel(graphSpace, graph, + vertexLabel); + } + + public void addEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + this.schemaMetaManager.addEdgeLabel(graphSpace, graph, edgeLabel); + } + + public void updateEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + this.schemaMetaManager.updateEdgeLabel(graphSpace, graph, edgeLabel); + } + + + public EdgeLabel getEdgeLabel(String graphSpace, String graph, + Id edgeLabel) { + return this.schemaMetaManager.getEdgeLabel(graphSpace, graph, + edgeLabel); + } + + public EdgeLabel getEdgeLabel(String graphSpace, String graph, + String edgeLabel) { + return this.schemaMetaManager.getEdgeLabel(graphSpace, graph, + edgeLabel); + } + + public List getEdgeLabels(String graphSpace, String graph) { + return this.schemaMetaManager.getEdgeLabels(graphSpace, graph); + } + + public Id removeEdgeLabel(String graphSpace, String graph, Id edgeLabel) { + return this.schemaMetaManager.removeEdgeLabel(graphSpace, graph, + edgeLabel); + 
} + + public void addIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + this.schemaMetaManager.addIndexLabel(graphSpace, graph, indexLabel); + } + + public void updateIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + this.schemaMetaManager.updateIndexLabel(graphSpace, graph, indexLabel); + } + + public IndexLabel getIndexLabel(String graphSpace, String graph, + Id indexLabel) { + return this.schemaMetaManager.getIndexLabel(graphSpace, graph, + indexLabel); + } + + public IndexLabel getIndexLabel(String graphSpace, String graph, + String indexLabel) { + return this.schemaMetaManager.getIndexLabel(graphSpace, graph, + indexLabel); + } + + public List getIndexLabels(String graphSpace, String graph) { + return this.schemaMetaManager.getIndexLabels(graphSpace, graph); + } + + public Id removeIndexLabel(String graphSpace, String graph, Id indexLabel) { + return this.schemaMetaManager.removeIndexLabel(graphSpace, graph, + indexLabel); + } + + public void createUser(HugeUser user) throws IOException { + this.authMetaManager.createUser(user); + } + + public HugeUser updateUser(HugeUser user) throws IOException { + return this.authMetaManager.updateUser(user); + } + + public HugeUser deleteUser(Id id) throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteUser(id); + } + + public HugeUser findUser(String name) + throws IOException, ClassNotFoundException { + return this.authMetaManager.findUser(name); + } + + public List listUsers(List ids) throws IOException, + ClassNotFoundException { + return this.authMetaManager.listUsers(ids); + } + + public List listAllUsers(long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllUsers(limit); + } + + public Id createGroup(HugeGroup group) throws IOException { + return this.authMetaManager.createGroup(group); + } + + public HugeGroup updateGroup(HugeGroup group) throws IOException { + return 
this.authMetaManager.updateGroup(group); + } + + public HugeGroup deleteGroup(Id id) throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteGroup(id); + } + + public HugeGroup findGroup(String name) throws IOException, + ClassNotFoundException { + return this.authMetaManager.findGroup(name); + } + + public List listGroups(long limit) throws IOException, + ClassNotFoundException { + return this.authMetaManager.listGroups(limit); + } + + public Id createRole(String graphSpace, HugeRole role) + throws IOException { + return this.authMetaManager.createRole(graphSpace, role); + } + + public HugeRole updateRole(String graphSpace, HugeRole role) + throws IOException { + return this.authMetaManager.updateRole(graphSpace, role); + } + + public HugeRole deleteRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteRole(graphSpace, id); + } + + public HugeRole findRole(String graphSpace, Id id) { + return this.authMetaManager.findRole(graphSpace, id); + } + + public HugeRole getRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.getRole(graphSpace, id); + } + + public List listRoles(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listRoles(graphSpace, ids); + } + + public List listAllRoles(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllRoles(graphSpace, limit); + } + + public Id createTarget(String graphSpace, HugeTarget target) + throws IOException { + return this.authMetaManager.createTarget(graphSpace, target); + } + + public HugeTarget updateTarget(String graphSpace, HugeTarget target) + throws IOException { + return this.authMetaManager.updateTarget(graphSpace, target); + } + + public HugeTarget deleteTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return 
this.authMetaManager.deleteTarget(graphSpace, id); + } + + public HugeTarget findTarget(String graphSpace, Id id) { + return this.authMetaManager.findTarget(graphSpace, id); + } + + public HugeTarget getTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.getTarget(graphSpace, id); + } + + public List listTargets(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listTargets(graphSpace, ids); + } + + public List listAllTargets(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllTargets(graphSpace, limit); + } + + public Id createBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + return this.authMetaManager.createBelong(graphSpace, belong); + } + + public HugeBelong updateBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + return this.authMetaManager.updateBelong(graphSpace, belong); + } + + public HugeBelong deleteBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteBelong(graphSpace, id); + } + + public HugeBelong getBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.getBelong(graphSpace, id); + } + + public boolean existBelong(String graphSpace, Id id) { + return this.authMetaManager.existBelong(graphSpace, id); + } + + public List listBelong(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listBelong(graphSpace, ids); + } + + public List listAllBelong(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllBelong(graphSpace, limit); + } + + public List listBelongBySource(String graphSpace, + Id user, String link, long limit) + throws IOException, + 
ClassNotFoundException { + return this.authMetaManager.listBelongBySource(graphSpace, user, + link, limit); + } + + public List listBelongByTarget(String graphSpace, + Id role, String link, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listBelongByTarget(graphSpace, role, + link, limit); + } + + public Id createAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + return this.authMetaManager.createAccess(graphSpace, access); + } + + public HugeAccess updateAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + return this.authMetaManager.updateAccess(graphSpace, access); + } + + public HugeAccess deleteAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + return this.authMetaManager.deleteAccess(graphSpace, id); + } + + public HugeAccess findAccess(String graphSpace, Id id) { + return this.authMetaManager.findAccess(graphSpace, id); + } + + public HugeAccess getAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + return this.authMetaManager.getAccess(graphSpace, id); + } + + public List listAccess(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAccess(graphSpace, ids); + } + + public List listAllAccess(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllAccess(graphSpace, limit); + } + + public List listAccessByRole(String graphSpace, + Id role, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAccessByRole(graphSpace, role, limit); + } + + public String targetFromAccess(String accessKey) { + return this.authMetaManager.targetFromAccess(accessKey); + } + + public void clearGraphAuth(String graphSpace) { + this.authMetaManager.clearGraphAuth(graphSpace); + } + + public List listAccessByTarget(String graphSpace, + Id 
target, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAccessByTarget(graphSpace, target, + limit); + } + + public List listGraphSpace() { + return this.spaceMetaManager.listGraphSpace(); + } + + public void initDefaultGraphSpace() { + String defaultGraphSpace = "DEFAULT"; + this.appendGraphSpaceList(defaultGraphSpace); + } + + public Map restProperties(String graphSpace, + String serviceId) { + return this.configMetaManager.restProperties(graphSpace, serviceId); + } + + public Map restProperties(String graphSpace, + String serviceId, + Map properties) { + return this.configMetaManager.restProperties(graphSpace, serviceId, + properties); + } + + public Map deleteRestProperties(String graphSpace, + String serviceId, + String key) { + return this.configMetaManager.deleteRestProperties(graphSpace, + serviceId, key); + } + + public Map clearRestProperties(String graphSpace, + String serviceId) { + return this.configMetaManager.clearRestProperties(graphSpace, + serviceId); + } + + public LockResult tryLockTask(String graphSpace, String graphName, + String taskId) { + return this.taskMetaManager.tryLockTask(graphSpace, graphName, taskId); + } + + public boolean isLockedTask(String graphSpace, String graphName, + String taskId) { + return this.taskMetaManager.isLockedTask(graphSpace, graphName, taskId); + } + + public void unlockTask(String graphSpace, String graphName, + String taskId, LockResult lockResult) { + this.taskMetaManager.unlockTask(graphSpace, graphName, taskId, lockResult); + } + + public String gremlinYaml(String graphSpace, String serviceId) { + return this.configMetaManager.gremlinYaml(graphSpace, serviceId); + } + + public String gremlinYaml(String graphSpace, String serviceId, + String yaml) { + return this.configMetaManager.gremlinYaml(graphSpace, serviceId, yaml); + } + + public String hstorePDPeers() { + return this.metaDriver.get(hstorePDPeersKey()); + } + + public void listenAll(Consumer consumer) { + 
this.metaDriver.listenPrefix(MetaManager.META_PATH_HUGEGRAPH, consumer); + } + + public SchemaMetaManager schemaMetaManager() { + return this.schemaMetaManager; + } + + public MetaDriver metaDriver() { + return this.metaDriver; + } + + public String getDDSHost() { + String key = this.ddsHostKey(); + String host = this.metaDriver.get(key); + return host; + } + + public String getHugeGraphClusterRole() { + String key = this.hugeClusterRoleKey(); + String role = this.metaDriver.get(key); + return role; + } + + public String getKafkaBrokerHost() { + String key = this.kafkaHostKey(); + return this.metaDriver.get(key); + } + + public String getKafkaBrokerPort() { + String key = this.kafkaPortKey(); + return this.metaDriver.get(key); + } + + public Integer getPartitionCount() { + String key = this.kafkaPartitionCountKey(); + String result = this.metaDriver.get(key); + try { + Integer count = Integer.parseInt(Optional.ofNullable(result) + .orElse("0")); + return count < 1 ? 1 : count; + } catch (Exception e) { + return 1; + } + } + + public String getKafkaSlaveServerHost() { + String key = this.kafkaSlaveHostKey(); + return this.metaDriver.get(key); + } + + public Integer getKafkaSlaveServerPort() { + String key = this.kafkaSlavePortKey(); + String portStr = this.metaDriver.get(key); + int port = Integer.parseInt(portStr); + return port; + } + + public List getKafkaFilteredGraphspace() { + String key = this.kafkaFilterGraphspaceKey(); + + String raw = this.metaDriver.get(key); + if (StringUtils.isEmpty(raw)) { + return Collections.EMPTY_LIST; + } + String[] parts = raw.split(","); + return Arrays.asList(parts); + } + + public List getKafkaFilteredGraph() { + String key = this.kafkaFilterGraphKey(); + + String raw = this.metaDriver.get(key); + if (StringUtils.isEmpty(raw)) { + return Collections.EMPTY_LIST; + } + String[] parts = raw.split(","); + return Arrays.asList(parts); + } + + public void updateKafkaFilteredGraphspace(List graphSpaces) { + String key = 
this.kafkaFilterGraphspaceKey(); + String val = String.join(",", graphSpaces); + this.metaDriver.put(key, val); + + } + + public void updateKafkaFilteredGraph(List graphs) { + String key = this.kafkaFilterGraphKey(); + String val = String.join(",", graphs); + this.metaDriver.put(key, val); + } + + public List getWhiteIpList() { + String key = this.whiteIpListKey(); + + String raw = this.metaDriver.get(key); + if (StringUtils.isEmpty(raw)) { + return new ArrayList<>(); + } + String[] parts = raw.split(","); + return new ArrayList<>(Arrays.asList(parts)); + } + + public void setWhiteIpList(List whiteIpList) { + String key = this.whiteIpListKey(); + + String val = String.join(",", whiteIpList); + this.metaDriver.put(key, val); + } + + public String getCompStatus(String statuskey) { + String raw = this.metaDriver.get(statuskey); + if (StringUtils.isEmpty(raw)) { + return ""; + } + return raw; + } + + public boolean getWhiteIpStatus() { + String key = this.whiteIpStatusKey(); + String raw = this.metaDriver.get(key); + return ("true".equals(raw)); + } + + public void setWhiteIpStatus(boolean status) { + String key = this.whiteIpStatusKey(); + this.metaDriver.put(key, ((Boolean) status).toString()); + } + + public enum MetaDriverType { + ETCD, + PD + } + + public enum BindingType { + OLTP, + OLAP, + STORAGE + } + + public static class AuthEvent { + private String op; // ALLOW: CREATE | DELETE | UPDATE + private String type; // ALLOW: USER | GROUP | TARGET | ACCESS | BELONG + private String id; + + public AuthEvent(String op, String type, String id) { + this.op = op; + this.type = type; + this.id = id; + } + + public AuthEvent(Map properties) { + this.op = properties.get("op").toString(); + this.type = properties.get("type").toString(); + this.id = properties.get("id").toString(); + } + + public String op() { + return this.op; + } + + public void op(String op) { + this.op = op; + } + + public String type() { + return this.type; + } + + public void type(String type) { + 
this.type = type; + } + + public String id() { + return this.id; + } + + public void id(String id) { + this.id = id; + } + + public Map asMap() { + return ImmutableMap.of("op", this.op, + "type", this.type, + "id", this.id); + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java new file mode 100644 index 0000000000..f7da14196c --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java @@ -0,0 +1,212 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.meta.lock.PdDistributedLock; +import org.apache.hugegraph.pd.client.KvClient; +import org.apache.hugegraph.pd.client.PDClient; +import org.apache.hugegraph.pd.client.PDConfig; +import org.apache.hugegraph.pd.common.PDException; +import org.apache.hugegraph.pd.grpc.kv.KResponse; +import org.apache.hugegraph.pd.grpc.kv.LockResponse; +import org.apache.hugegraph.pd.grpc.kv.ScanPrefixResponse; +import org.apache.hugegraph.pd.grpc.kv.TTLResponse; +import org.apache.hugegraph.pd.grpc.kv.WatchEvent; +import org.apache.hugegraph.pd.grpc.kv.WatchResponse; +import org.apache.hugegraph.pd.grpc.kv.WatchType; + +import com.google.common.base.Strings; + +public class PdMetaDriver implements MetaDriver { + + private final KvClient client; + private final PDClient pdClient; + private final PdDistributedLock lock; + + public PdMetaDriver(String pdPeer) { + PDConfig pdConfig = PDConfig.of(pdPeer); + this.client = new KvClient<>(pdConfig); + this.pdClient = PDClient.create(pdConfig); + this.lock = new PdDistributedLock(this.client); + } + + public PDClient pdClient() { + return this.pdClient; + } + + @Override + public void put(String key, String value) { + try { + this.client.put(key, value); + } catch (PDException e) { + throw new HugeException("Failed to put '%s:%s' to pd", e, key, value); + } + } + + @Override + public String get(String key) { + try { + KResponse response = this.client.get(key); + return response.getValue(); + } catch (PDException e) { + throw new HugeException("Failed to get '%s' from pd", e, key); + } + } + + @Override + public void delete(String key) { + try { + this.client.delete(key); + } catch (PDException e) { + throw new HugeException("Failed to 
delete '%s' from pd", e, key); + } + } + + @Override + public void deleteWithPrefix(String prefix) { + try { + this.client.deletePrefix(prefix); + } catch (PDException e) { + throw new HugeException("Failed to deleteWithPrefix '%s' from pd", e, prefix); + } + } + + @Override + public Map scanWithPrefix(String prefix) { + try { + ScanPrefixResponse response = this.client.scanPrefix(prefix); + return response.getKvsMap(); + } catch (PDException e) { + throw new HugeException("Failed to scanWithPrefix '%s' from pd", e, prefix); + } + } + + @Override + public void listen(String key, Consumer consumer) { + try { + this.client.listen(key, (Consumer) consumer); + } catch (PDException e) { + throw new HugeException("Failed to listen '%s' to pd", e, key); + } + } + + @Override + public void listenPrefix(String prefix, Consumer consumer) { + try { + this.client.listenPrefix(prefix, (Consumer) consumer); + } catch (PDException e) { + throw new HugeException("Failed to listenPrefix '%s' to pd", e, prefix); + } + } + + @Override + public List extractValuesFromResponse(T response) { + List values = new ArrayList<>(); + WatchResponse res = (WatchResponse) response; + for (WatchEvent event : res.getEventsList()) { + // Skip if not PUT event + if (!event.getType().equals(WatchType.Put)) { + return null; + } + String value = event.getCurrent().getValue(); + values.add(value); + } + return values; + } + + @Override + public Map extractKVFromResponse(T response) { + Map resultMap = new HashMap<>(); + WatchResponse res = (WatchResponse) response; + for (WatchEvent event : res.getEventsList()) { + // Skip if not etcd PUT event + if (!event.getType().equals(WatchType.Put)) { + continue; + } + + String key = event.getCurrent().getKey(); + String value = event.getCurrent().getValue(); + if (Strings.isNullOrEmpty(key)) { + continue; + } + resultMap.put(key, value); + } + return resultMap; + } + + @Override + public LockResult tryLock(String key, long ttl, long timeout) { + return 
this.lock.lock(key, ttl); + } + + @Override + public boolean isLocked(String key) { + LockResponse locked; + try { + locked = this.client.isLocked(key); + } catch (PDException e) { + throw new HugeException("Failed to get isLocked '%s' from pd", key); + } + return locked.getSucceed(); + } + + @Override + public void unlock(String key, LockResult lockResult) { + this.lock.unLock(key, lockResult); + } + + @Override + public long keepAlive(String key, long lease) { + try { + LockResponse lockResponse = this.client.keepAlive(key); + boolean succeed = lockResponse.getSucceed(); + if (!succeed) { + throw new HugeException("Failed to keepAlive '%s' to pd", key); + } + return lockResponse.getClientId(); + } catch (PDException e) { + throw new HugeException("Failed to keepAlive '%s' to pd", e, key); + } + } + + public boolean keepTTLAlive(String key) { + try { + TTLResponse response = this.client.keepTTLAlive(key); + return response.getSucceed(); + } catch (PDException e) { + throw new HugeException("Failed to keepTTLAlive '%s' to pd", e, key); + } + } + + public boolean putTTL(String key, String value, long ttl) { + try { + TTLResponse response = this.client.putTTL(key, value, ttl); + return response.getSucceed(); + } catch (PDException e) { + throw new HugeException("Failed to keepTTLAlive '%s' to pd", e, key); + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/EtcdDistributedLock.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/EtcdDistributedLock.java new file mode 100644 index 0000000000..ed62a429f0 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/EtcdDistributedLock.java @@ -0,0 +1,167 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.lock; + +import java.nio.charset.Charset; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import io.etcd.jetcd.ByteSequence; +import io.etcd.jetcd.Client; +import io.etcd.jetcd.KV; +import io.etcd.jetcd.Lease; +import io.etcd.jetcd.Lock; + +public class EtcdDistributedLock { + + protected static final Logger LOG = Log.logger(EtcdDistributedLock.class); + private static final long UNLIMITED_TIMEOUT = -1L; + private final static Object mutex = new Object(); + private static EtcdDistributedLock lockProvider = null; + private final KV kvClient; + private final Lock lockClient; + private final Lease leaseClient; + + private static final int poolSize = 8; + private final ScheduledExecutorService service = new ScheduledThreadPoolExecutor(poolSize, r -> { + Thread t = new Thread(r, "keepalive"); + t.setDaemon(true); + return t; + }); + + private EtcdDistributedLock(Client client) { + this.kvClient = client.getKVClient(); + this.lockClient = client.getLockClient(); + this.leaseClient = client.getLeaseClient(); + } + + public static EtcdDistributedLock getInstance(Client client) { + 
synchronized (mutex) { + if (null == lockProvider) { + lockProvider = new EtcdDistributedLock(client); + } + } + return lockProvider; + } + + private static ByteSequence toByteSequence(String content) { + return ByteSequence.from(content, Charset.defaultCharset()); + } + + public LockResult tryLock(String lockName, long ttl, long timeout) { + LockResult lockResult = new LockResult(); + lockResult.lockSuccess(false); + lockResult.setService(service); + + long leaseId; + + try { + leaseId = this.leaseClient.grant(ttl).get().getID(); + } catch (InterruptedException | ExecutionException e) { + LOG.warn(String.format("Thread {} failed to create lease for {} " + + "with ttl {}", Thread.currentThread().getName(), + lockName, ttl), + e); + return lockResult; + } + + lockResult.setLeaseId(leaseId); + + long period = ttl - ttl / 5; + service.scheduleAtFixedRate(new KeepAliveTask(this.leaseClient, leaseId), + period, period, TimeUnit.SECONDS); + + try { + if (timeout == UNLIMITED_TIMEOUT) { + this.lockClient.lock(toByteSequence(lockName), leaseId).get(); + + } else { + this.lockClient.lock(toByteSequence(lockName), leaseId) + .get(1, TimeUnit.SECONDS); + } + } catch (InterruptedException | ExecutionException e) { + LOG.warn(String.format("Thread {} failed to lock {}", + Thread.currentThread().getName(), lockName), + e); + service.shutdown(); + this.revokeLease(leaseId); + return lockResult; + } catch (TimeoutException e) { + // 获取锁超时 + LOG.warn("Thread {} timeout to lock {}", + Thread.currentThread().getName(), lockName); + service.shutdown(); + this.revokeLease(leaseId); + return lockResult; + } + + lockResult.lockSuccess(true); + + return lockResult; + } + + public LockResult lock(String lockName, long ttl) { + return tryLock(lockName, ttl, UNLIMITED_TIMEOUT); + } + + public void unLock(String lockName, LockResult lockResult) { + LOG.debug("Thread {} start to unlock {}", + Thread.currentThread().getName(), lockName); + + lockResult.getService().shutdown(); + + if 
(lockResult.getLeaseId() != 0L) { + this.revokeLease(lockResult.getLeaseId()); + } + + LOG.debug("Thread {} unlock {} successfully", + Thread.currentThread().getName(), lockName); + } + + private void revokeLease(long leaseId) { + try { + this.leaseClient.revoke(leaseId).get(); + } catch (InterruptedException | ExecutionException e) { + LOG.warn(String.format("Thread %s failed to revoke release %s", + Thread.currentThread().getName(), leaseId), e); + } + } + + public static class KeepAliveTask implements Runnable { + + private final Lease leaseClient; + private final long leaseId; + + KeepAliveTask(Lease leaseClient, long leaseId) { + this.leaseClient = leaseClient; + this.leaseId = leaseId; + } + + @Override + public void run() { + // TODO: calculate the time interval between the calls + this.leaseClient.keepAliveOnce(this.leaseId); + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/LockResult.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/LockResult.java new file mode 100644 index 0000000000..6909b73183 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/lock/LockResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
/**
 * Value holder for the outcome of a distributed-lock attempt: whether the
 * lock was taken, the lease/client id backing it, and the scheduler/future
 * that keep the lock alive so the owner can cancel them on unlock.
 */
public class LockResult {

    // Whether the lock attempt succeeded
    private boolean success;
    // Lease (etcd) or client (PD) id backing the lock; 0 when none
    private long leaseId;
    // Scheduler running the keepalive task, if any
    private ScheduledExecutorService keepAliveService;
    // Handle of the scheduled keepalive task, if any
    private ScheduledFuture keepAliveFuture;

    public boolean lockSuccess() {
        return this.success;
    }

    public void lockSuccess(boolean isLockSuccess) {
        this.success = isLockSuccess;
    }

    public long getLeaseId() {
        return this.leaseId;
    }

    public void setLeaseId(long leaseId) {
        this.leaseId = leaseId;
    }

    public ScheduledExecutorService getService() {
        return this.keepAliveService;
    }

    public void setService(ScheduledExecutorService service) {
        this.keepAliveService = service;
    }

    public ScheduledFuture getFuture() {
        return this.keepAliveFuture;
    }

    public void setFuture(ScheduledFuture future) {
        this.keepAliveFuture = future;
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.lock; + +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.pd.client.KvClient; +import org.apache.hugegraph.pd.common.PDException; +import org.apache.hugegraph.pd.grpc.kv.LockResponse; + +public class PdDistributedLock { + + private static final int poolSize = 8; + private final KvClient client; + private final ScheduledExecutorService service = new ScheduledThreadPoolExecutor(poolSize, r -> { + Thread t = new Thread(r, "keepalive"); + t.setDaemon(true); + return t; + }); + + public PdDistributedLock(KvClient client) { + this.client = client; + } + + public LockResult lock(String key, long second) { + long ttl = second * 1000L; + try { + LockResponse response = this.client.lockWithoutReentrant(key, ttl); + boolean succeed = response.getSucceed(); + LockResult result = new LockResult(); + if (succeed) { + result.setLeaseId(response.getClientId()); + result.lockSuccess(true); + long period = ttl - ttl / 4; + ScheduledFuture future = service.scheduleAtFixedRate(() -> { + // TODO: why synchronized? 
+ synchronized (result) { + keepAlive(key); + } + }, period, period, TimeUnit.MILLISECONDS); + result.setFuture(future); + } + return result; + } catch (PDException e) { + throw new HugeException("Failed to lock '%s' to pd", e, key); + } + } + + public void unLock(String key, LockResult lockResult) { + try { + LockResponse response = this.client.unlock(key); + boolean succeed = response.getSucceed(); + if (!succeed) { + throw new HugeException("Failed to unlock '%s' to pd", key); + } + if (lockResult.getFuture() != null) { + // TODO: why synchronized? + synchronized (lockResult) { + lockResult.getFuture().cancel(true); + } + } + } catch (PDException e) { + throw new HugeException("Failed to unlock '%s' to pd", e, key); + } + } + + public boolean keepAlive(String key) { + try { + LockResponse alive = this.client.keepAlive(key); + return alive.getSucceed(); + } catch (PDException e) { + throw new HugeException("Failed to keepAlive '%s' to pd", key); + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AbstractMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AbstractMetaManager.java new file mode 100644 index 0000000000..b1928d38eb --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AbstractMetaManager.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.LOCK_DEFAULT_LEASE; +import static org.apache.hugegraph.meta.MetaManager.LOCK_DEFAULT_TIMEOUT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; + +import java.util.Map; +import java.util.Optional; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.auth.SchemaDefine; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.util.JsonUtil; + +public class AbstractMetaManager { + + protected final MetaDriver metaDriver; + protected final String cluster; + + public AbstractMetaManager(MetaDriver metaDriver, String cluster) { + this.metaDriver = metaDriver; + this.cluster = cluster; + } + + protected static String serialize(SchemaDefine.AuthElement element) { + Map objectMap = element.asMap(); + return JsonUtil.toJson(objectMap); + } + + protected static String serialize(SchemaElement element) { + Map objectMap = element.asMap(); + return JsonUtil.toJson(objectMap); + } + + @SuppressWarnings("unchecked") + protected static Map configMap(String config) { + return JsonUtil.fromJson(config, Map.class); + } + + protected void listen(String key, Consumer consumer) { + this.metaDriver.listen(key, consumer); + } + + protected void listenPrefix(String prefix, Consumer consumer) { + this.metaDriver.listenPrefix(prefix, consumer); + } + + public String 
getRaw(String key) { + String result = this.metaDriver.get(key); + return Optional.ofNullable(result).orElse(""); + } + + public void putOrDeleteRaw(String key, String val) { + if (StringUtils.isEmpty(val)) { + this.metaDriver.delete(key); + } else { + this.metaDriver.put(key, val); + } + } + + public LockResult tryLock(String key) { + return this.metaDriver.tryLock(key, LOCK_DEFAULT_LEASE, + LOCK_DEFAULT_TIMEOUT); + } + + public void unlock(LockResult lockResult, String... keys) { + String key = String.join(META_PATH_DELIMITER, keys); + this.unlock(key, lockResult); + } + + public void unlock(String key, LockResult lockResult) { + this.metaDriver.unlock(key, lockResult); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AuthMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AuthMetaManager.java new file mode 100644 index 0000000000..2160226fbf --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/AuthMetaManager.java @@ -0,0 +1,1035 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ACCESS; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_AUTH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_AUTH_EVENT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_BELONG; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GROUP; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ROLE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_TARGET; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_USER; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.auth.HugeAccess; +import org.apache.hugegraph.auth.HugeBelong; +import org.apache.hugegraph.auth.HugeGroup; +import org.apache.hugegraph.auth.HugePermission; +import org.apache.hugegraph.auth.HugeRole; +import org.apache.hugegraph.auth.HugeTarget; +import org.apache.hugegraph.auth.HugeUser; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.JsonUtil; + +public class AuthMetaManager extends AbstractMetaManager { + + public AuthMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + + public void createUser(HugeUser user) throws IOException { + String result = this.metaDriver.get(userKey(user.name())); + E.checkArgument(StringUtils.isEmpty(result), + "The user 
name '%s' has existed", user.name()); + this.metaDriver.put(userKey(user.name()), serialize(user)); + } + + public HugeUser updateUser(HugeUser user) throws IOException { + String result = this.metaDriver.get(userKey(user.name())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The user name '%s' does not existed", user.name()); + + HugeUser ori = HugeUser.fromMap(JsonUtil.fromJson(result, Map.class)); + ori.update(new Date()); + ori.nickname(user.nickname()); + ori.password(user.password()); + ori.phone(user.phone()); + ori.email(user.email()); + ori.avatar(user.avatar()); + ori.description(user.description()); + this.metaDriver.put(userKey(user.name()), serialize(ori)); + return ori; + } + + public HugeUser deleteUser(Id id) throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(userKey(id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The user name '%s' does not existed", id.asString()); + this.metaDriver.delete(userKey(id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "USER", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeUser.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeUser findUser(String name) + throws IOException, ClassNotFoundException { + String result = this.metaDriver.get(userKey(name)); + if (StringUtils.isEmpty(result)) { + return null; + } + + return HugeUser.fromMap(JsonUtil.fromJson(result, Map.class)); + } + + public List listUsers(List ids) throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map userMap = + this.metaDriver.scanWithPrefix(userListKey()); + for (Id id : ids) { + if (userMap.containsKey(userKey(id.asString()))) { + String value = userMap.get(userKey(id.asString())); + Map map = JsonUtil.fromJson(value, Map.class); + HugeUser user = HugeUser.fromMap(map); + result.add(user); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List 
listUsersByGroup(String group, long limit) + throws IOException, ClassNotFoundException { + List result = new ArrayList<>(); + Map userMap = + this.metaDriver.scanWithPrefix(userListKey()); + for (Map.Entry item : userMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeUser user = HugeUser.fromMap(map); + result.add(user); + } + + List belongs = new ArrayList<>(); + Map belongMap = this.metaDriver.scanWithPrefix( + belongListKey("*")); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && belongs.size() >= limit) { + break; + } + String groupName = arrayFromBelong(item.getKey())[2]; + if (groupName.equals(group)) { + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + belongs.add(belong); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllUsers(long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map userMap = + this.metaDriver.scanWithPrefix(userListKey()); + for (Map.Entry item : userMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeUser user = HugeUser.fromMap(map); + result.add(user); + } + + return result; + } + + public Id createGroup(HugeGroup group) throws IOException { + String key = groupKey(group.name()); + String result = this.metaDriver.get(key); + E.checkArgument(StringUtils.isEmpty(result), + "The group name '%s' has existed", group.name()); + this.metaDriver.put(key, serialize(group)); + return group.id(); + } + + public HugeGroup updateGroup(HugeGroup group) throws IOException { + String key = groupKey(group.name()); + String result = this.metaDriver.get(key); + E.checkArgument(StringUtils.isNotEmpty(result), + "The group name '%s' is not existed", group.name()); + Map map = JsonUtil.fromJson(result, Map.class); + 
HugeGroup ori = HugeGroup.fromMap(map); + ori.update(new Date()); + ori.nickname(group.nickname()); + ori.description(group.description()); + this.metaDriver.put(key, serialize(ori)); + return ori; + } + + public HugeGroup deleteGroup(Id id) throws IOException, + ClassNotFoundException { + String name = id.asString(); + String key = groupKey(name); + String result = this.metaDriver.get(key); + E.checkArgument(StringUtils.isNotEmpty(result), + "The group name '%s' is not existed", name); + this.metaDriver.delete(key); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "GROUP", + name)); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeGroup.fromMap(map); + } + + public HugeGroup findGroup(String name) throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(groupKey(name)); + if (StringUtils.isEmpty(result)) { + return null; + } + + return HugeGroup.fromMap(JsonUtil.fromJson(result, Map.class)); + } + + public List listGroups(long limit) throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map groupMap = + this.metaDriver.scanWithPrefix(groupListKey()); + for (Map.Entry item : groupMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeGroup group = HugeGroup.fromMap(map); + result.add(group); + } + + return result; + } + + @SuppressWarnings("unchecked") + public Id createRole(String graphSpace, HugeRole role) + throws IOException { + Id roleId = IdGenerator.of(role.name()); + HugeRole existed = this.findRole(graphSpace, roleId); + // not support too many role to share same id + E.checkArgument(existed == null, "The role name '%s' has existed", + role.name()); + role.name(roleId.asString()); + + this.metaDriver.put(roleKey(graphSpace, role.name()), + serialize(role)); + return roleId; + } + + @SuppressWarnings("unchecked") + public HugeRole updateRole(String graphSpace, HugeRole role) + throws 
IOException { + String result = this.metaDriver.get(roleKey(graphSpace, role.name())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The role name '%s' is not existed", role.name()); + + // only description and update-time could be updated + Map map = JsonUtil.fromJson(result, Map.class); + HugeRole ori = HugeRole.fromMap(map); + ori.update(new Date()); + ori.nickname(role.nickname()); + ori.description(role.description()); + this.metaDriver.put(roleKey(graphSpace, ori.name()), + serialize(ori)); + return ori; + } + + @SuppressWarnings("unchecked") + public HugeRole deleteRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(roleKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The role name '%s' is not existed", id.asString()); + this.metaDriver.delete(roleKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "ROLE", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeRole.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeRole findRole(String graphSpace, Id id) { + String result = this.metaDriver.get(roleKey(graphSpace, + id.asString())); + if (StringUtils.isEmpty(result)) { + return null; + } + Map map = JsonUtil.fromJson(result, Map.class); + return HugeRole.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeRole getRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(roleKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The role name '%s' is not existed", id.asString()); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeRole.fromMap(map); + } + + @SuppressWarnings("unchecked") + public List listRoles(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map roleMap = + 
this.metaDriver.scanWithPrefix(roleListKey(graphSpace)); + for (Id id : ids) { + if (roleMap.containsKey(roleKey(graphSpace, id.asString()))) { + String roleString = roleMap.get(roleKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(roleString, + Map.class); + HugeRole role = HugeRole.fromMap(map); + result.add(role); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllRoles(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map roleMap = + this.metaDriver.scanWithPrefix(roleListKey(graphSpace)); + for (Map.Entry item : roleMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeRole role = HugeRole.fromMap(map); + result.add(role); + } + + return result; + } + + public Id createTarget(String graphSpace, HugeTarget target) + throws IOException { + String result = this.metaDriver.get(targetKey(graphSpace, + target.name())); + E.checkArgument(StringUtils.isEmpty(result), + "The target name '%s' has existed", target.name()); + this.metaDriver.put(targetKey(graphSpace, target.name()), + serialize(target)); + return target.id(); + } + + @SuppressWarnings("unchecked") + public HugeTarget updateTarget(String graphSpace, HugeTarget target) + throws IOException { + String result = this.metaDriver.get(targetKey(graphSpace, + target.name())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The target name '%s' is not existed", target.name()); + + // only resources and update-time could be updated + Map map = JsonUtil.fromJson(result, Map.class); + HugeTarget ori = HugeTarget.fromMap(map); + ori.update(new Date()); + ori.graph(target.graph()); + ori.description(target.description()); + ori.resources(target.resources()); + this.metaDriver.put(targetKey(graphSpace, target.name()), + serialize(ori)); + this.putAuthEvent(new MetaManager.AuthEvent("UPDATE", "TARGET", + 
ori.id().asString())); + return ori; + } + + @SuppressWarnings("unchecked") + public HugeTarget deleteTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(targetKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The target name '%s' is not existed", id.asString()); + this.metaDriver.delete(targetKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "TARGET", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeTarget.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeTarget findTarget(String graphSpace, Id id) { + String result = this.metaDriver.get(targetKey(graphSpace, + id.asString())); + if (StringUtils.isEmpty(result)) { + return null; + } + Map map = JsonUtil.fromJson(result, Map.class); + return HugeTarget.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeTarget getTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(targetKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The target name '%s' is not existed", id.asString()); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeTarget.fromMap(map); + } + + @SuppressWarnings("unchecked") + public List listTargets(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map targetMap = + this.metaDriver.scanWithPrefix(targetListKey(graphSpace)); + for (Id id : ids) { + if (targetMap.containsKey(targetKey(graphSpace, id.asString()))) { + String targetString = targetMap.get(targetKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(targetString, + Map.class); + HugeTarget target = HugeTarget.fromMap(map); + result.add(target); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllTargets(String graphSpace, 
long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map targetMap = + this.metaDriver.scanWithPrefix(targetListKey(graphSpace)); + for (Map.Entry item : targetMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeTarget target = HugeTarget.fromMap(map); + result.add(target); + } + + return result; + } + + public Id createBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + String belongId = this.checkBelong(graphSpace, belong); + String result = this.metaDriver.get(belongKey(graphSpace, belongId)); + E.checkArgument(StringUtils.isEmpty(result), + "The belong name '%s' has existed", belongId); + this.metaDriver.put(belongKey(graphSpace, belongId), serialize(belong)); + this.putAuthEvent(new MetaManager.AuthEvent("CREATE", "BELONG", belongId)); + return IdGenerator.of(belongId); + } + + @SuppressWarnings("unchecked") + public HugeBelong updateBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + String belongId = this.checkBelong(graphSpace, belong); + String result = this.metaDriver.get(belongKey(graphSpace, belongId)); + E.checkArgument(StringUtils.isNotEmpty(result), + "The belong name '%s' is not existed", belongId); + + // only description and update-time could be updated + Map map = JsonUtil.fromJson(result, Map.class); + HugeBelong ori = HugeBelong.fromMap(map); + ori.update(new Date()); + ori.description(belong.description()); + this.metaDriver.put(belongKey(graphSpace, belongId), serialize(ori)); + return ori; + } + + public String checkBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + String source = belong.source().asString(); + String target = belong.target().asString(); + String link = belong.link(); + HugeUser user = this.findUser(source); + HugeGroup group = this.findGroup(source); + 
E.checkArgument(user != null || group != null, + "The source name '%s' is not existed", + source); + HugeGroup groupTarget = this.findGroup(target); + HugeRole role = this.findRole(graphSpace, belong.target()); + E.checkArgument(role != null || groupTarget != null, + "The target name '%s' is not existed", + target); + + return belongId(source, target, link); + } + + @SuppressWarnings("unchecked") + public HugeBelong deleteBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(belongKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The belong name '%s' is not existed", id.asString()); + this.metaDriver.delete(belongKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "BELONG", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeBelong.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeBelong getBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(belongKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The belong name '%s' is not existed", id.asString()); + + Map map = JsonUtil.fromJson(result, Map.class); + return HugeBelong.fromMap(map); + } + + public boolean existBelong(String graphSpace, Id id) { + String result = this.metaDriver.get(belongKey(graphSpace, + id.asString())); + return StringUtils.isNotEmpty(result); + } + + @SuppressWarnings("unchecked") + public List listBelong(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map belongMap = + this.metaDriver.scanWithPrefix(belongListKey(graphSpace)); + for (Id id : ids) { + if (belongMap.containsKey(belongKey(graphSpace, id.asString()))) { + String belongString = belongMap.get(belongKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(belongString, + 
Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllBelong(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map belongMap = + this.metaDriver.scanWithPrefix(belongListKey(graphSpace)); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listBelongBySource(String graphSpace, Id source, + String link, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + + String sourceLink = (HugeBelong.ALL.equals(link)) ? source.asString() : + source.asString() + "->" + link; + + String key = belongListKeyBySource(graphSpace, sourceLink); + + Map belongMap = this.metaDriver.scanWithPrefix(key); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + + return result; + } + + public String[] arrayFromBelong(String belongKey) { + E.checkArgument(StringUtils.isNotEmpty(belongKey), + "The belong name '%s' is empty", belongKey); + E.checkArgument(belongKey.contains("->"), + "The belong name '%s' is invalid", belongKey); + String[] items = belongKey.split("->"); + E.checkArgument(items.length == 3, + "The belong name '%s' is invalid", belongKey); + return items; + } + + @SuppressWarnings("unchecked") + public List listBelongByTarget(String graphSpace, + Id role, String link, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map belongMap = this.metaDriver.scanWithPrefix( + 
belongListKey(graphSpace)); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + String[] array = arrayFromBelong(item.getKey()); + String linkName = array[1]; + String roleName = array[2]; + if ((linkName.equals(link) || "*".equals(link)) && + roleName.equals(role.asString())) { + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + } + + return result; + } + + public Id createAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + String accessId = this.checkAccess(graphSpace, access); + String result = this.metaDriver.get(accessKey(graphSpace, accessId)); + E.checkArgument(StringUtils.isEmpty(result), + "The access name '%s' has existed", accessId); + this.metaDriver.put(accessKey(graphSpace, accessId), serialize(access)); + this.putAuthEvent(new MetaManager.AuthEvent("CREATE", "ACCESS", accessId)); + return IdGenerator.of(accessId); + } + + @SuppressWarnings("unchecked") + public HugeAccess updateAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + String accessId = this.checkAccess(graphSpace, access); + String result = this.metaDriver.get(accessKey(graphSpace, accessId)); + E.checkArgument(StringUtils.isNotEmpty(result), + "The access name '%s' is not existed", accessId); + Map map = JsonUtil.fromJson(result, Map.class); + HugeAccess existed = HugeAccess.fromMap(map); + E.checkArgument(existed.permission().code() == + access.permission().code(), + "The access name '%s' has existed", accessId); + + // only description and update-time could be updated + Map oriMap = JsonUtil.fromJson(result, Map.class); + HugeAccess ori = HugeAccess.fromMap(oriMap); + ori.update(new Date()); + ori.description(access.description()); + this.metaDriver.put(accessKey(graphSpace, accessId), serialize(ori)); + return ori; + } + + public String checkAccess(String 
graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + HugeRole role = this.getRole(graphSpace, access.source()); + E.checkArgument(role != null, + "The role name '%s' is not existed", + access.source().asString()); + + HugeTarget target = this.getTarget(graphSpace, access.target()); + E.checkArgument(target != null, + "The target name '%s' is not existed", + access.target().asString()); + + return accessId(role.name(), target.name(), access.permission()); + } + + @SuppressWarnings("unchecked") + public HugeAccess deleteAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + String result = this.metaDriver.get(accessKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The access name '%s' is not existed", id.asString()); + this.metaDriver.delete(accessKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "ACCESS", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeAccess.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeAccess findAccess(String graphSpace, Id id) { + String result = this.metaDriver.get(accessKey(graphSpace, + id.asString())); + if (StringUtils.isEmpty(result)) { + return null; + } + Map map = JsonUtil.fromJson(result, Map.class); + return HugeAccess.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeAccess getAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + String result = this.metaDriver.get(accessKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The access name '%s' is not existed", id.asString()); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeAccess.fromMap(map); + } + + @SuppressWarnings("unchecked") + public List listAccess(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = + 
this.metaDriver.scanWithPrefix(accessListKey(graphSpace)); + for (Id id : ids) { + if (accessMap.containsKey(accessKey(graphSpace, id.asString()))) { + String accessString = accessMap.get(accessKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(accessString, + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllAccess(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = + this.metaDriver.scanWithPrefix(accessListKey(graphSpace)); + for (Map.Entry item : accessMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAccessByRole(String graphSpace, + Id role, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = this.metaDriver.scanWithPrefix( + accessListKeyByRole(graphSpace, role.asString())); + for (Map.Entry item : accessMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + + return result; + } + + public String targetFromAccess(String accessKey) { + E.checkArgument(StringUtils.isNotEmpty(accessKey), + "The access name '%s' is empty", accessKey); + E.checkArgument(accessKey.contains("->"), + "The access name '%s' is invalid", accessKey); + String[] items = accessKey.split("->"); + E.checkArgument(items.length == 3, + "The access name '%s' is invalid", accessKey); + return items[2]; + } + + public void clearGraphAuth(String graphSpace) { + E.checkArgument(StringUtils.isNotEmpty(graphSpace), + "The graphSpace is 
empty"); + String prefix = this.authPrefix(graphSpace); + this.metaDriver.deleteWithPrefix(prefix); + } + + @SuppressWarnings("unchecked") + public List listAccessByTarget(String graphSpace, + Id target, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = this.metaDriver.scanWithPrefix( + accessListKey(graphSpace)); + for (Map.Entry item : accessMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + String targetName = targetFromAccess(item.getKey()); + if (targetName.equals(target.asString())) { + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + } + + return result; + } + + public void listenAuthEvent(Consumer consumer) { + this.listen(this.authEventKey(), consumer); + } + + public void putAuthEvent(MetaManager.AuthEvent event) { + this.metaDriver.put(authEventKey(), JsonUtil.toJson(event.asMap())); + } + + public String belongId(String source, String target, String link) { + E.checkArgument(StringUtils.isNotEmpty(source) && + StringUtils.isNotEmpty(target), + "The source name '%s' or target name '%s' is empty", + source, target); + return String.join("->", source, link, target); + } + + public String accessId(String roleName, String targetName, + HugePermission permission) { + E.checkArgument(StringUtils.isNotEmpty(roleName) && + StringUtils.isNotEmpty(targetName), + "The role name '%s' or target name '%s' is empty", + roleName, targetName); + String code = String.valueOf(permission.code()); + return String.join("->", roleName, code, targetName); + } + + public String authEventKey() { + // HUGEGRAPH/{cluster}/AUTH_EVENT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH_EVENT); + } + + private String userKey(String name) { + // HUGEGRAPH/{cluster}/AUTH/USER/{user} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + 
META_PATH_AUTH, + META_PATH_USER, + name); + } + + private String userListKey() { + // HUGEGRAPH/{cluster}/AUTH/USER + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH, + META_PATH_USER); + } + + private String authPrefix(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH); + } + + private String groupKey(String group) { + // HUGEGRAPH/{cluster}/AUTH/GROUP/{group} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH, + META_PATH_GROUP, + group); + } + + private String groupListKey() { + // HUGEGRAPH/{cluster}/AUTH/GROUP + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH, + META_PATH_GROUP); + } + + private String roleKey(String graphSpace, String role) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ROLE/{role} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ROLE, + role); + } + + private String roleListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ROLE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ROLE); + } + + private String targetKey(String graphSpace, String target) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/TARGET/{target} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_TARGET, + target); + } + + private String targetListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/TARGET + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + 
graphSpace, + META_PATH_AUTH, + META_PATH_TARGET); + } + + private String belongKey(String graphSpace, String belong) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/BELONG/{belong} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_BELONG, + belong); + } + + private String belongListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/BELONG + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_BELONG); + } + + private String belongListKeyBySource(String graphSpace, String source) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/BELONG/{userName} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_BELONG, + source + "->"); + } + + private String accessKey(String graphSpace, String access) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ACCESS/{role->op->target} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ACCESS, + access); + } + + private String accessListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ACCESS + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ACCESS); + } + + private String accessListKeyByRole(String graphSpace, String roleName) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ACCESS/{roleName} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ACCESS, + roleName + "->"); + } +} diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ConfigMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ConfigMetaManager.java new file mode 100644 index 0000000000..280c80bc75 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ConfigMetaManager.java @@ -0,0 +1,149 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.meta.managers;

import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GREMLIN_YAML;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_REST_PROPERTIES;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_SERVICE;

import java.util.Map;
import java.util.function.Consumer;

import org.apache.commons.lang3.StringUtils;
import org.apache.hugegraph.meta.MetaDriver;
import org.apache.hugegraph.util.JsonUtil;

/**
 * Manages per-service configuration stored in the meta server:
 * REST properties (a JSON map) and the gremlin-server YAML, both keyed
 * under HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE/{serviceId}.
 */
public class ConfigMetaManager extends AbstractMetaManager {

    public ConfigMetaManager(MetaDriver metaDriver, String cluster) {
        super(metaDriver, cluster);
    }

    /**
     * Read the REST properties of a service.
     *
     * @return the property map, or null when none are stored
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> restProperties(String graphSpace,
                                              String serviceId) {
        Map<String, Object> map = null;
        String result = this.metaDriver.get(restPropertiesKey(graphSpace,
                                                              serviceId));
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
        }
        return map;
    }

    /**
     * Merge {@code properties} into the stored REST properties (new keys
     * win) and persist the result.
     *
     * @return the merged map that was written
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> restProperties(String graphSpace,
                                              String serviceId,
                                              Map<String, Object> properties) {
        Map<String, Object> map;
        String result = this.metaDriver.get(restPropertiesKey(graphSpace,
                                                              serviceId));
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
            map.putAll(properties);
        } else {
            map = properties;
        }
        this.metaDriver.put(restPropertiesKey(graphSpace, serviceId),
                            JsonUtil.toJson(map));
        return map;
    }

    /**
     * Remove a single key from the stored REST properties and persist the
     * remainder.
     *
     * @return the map after removal, or null when nothing was stored
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> deleteRestProperties(String graphSpace,
                                                    String serviceId,
                                                    String key) {
        Map<String, Object> map = null;
        String result = this.metaDriver.get(restPropertiesKey(graphSpace,
                                                              serviceId));
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
            map.remove(key);
            this.metaDriver.put(restPropertiesKey(graphSpace, serviceId),
                                JsonUtil.toJson(map));
        }
        return map;
    }

    /**
     * Delete the whole REST-properties entry of a service.
     *
     * @return the map that was removed, or null when nothing was stored
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> clearRestProperties(String graphSpace,
                                                   String serviceId) {
        Map<String, Object> map = null;
        String key = restPropertiesKey(graphSpace, serviceId);
        String result = this.metaDriver.get(key);
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
            this.metaDriver.delete(key);
        }
        return map;
    }

    /** Read the stored gremlin-server YAML of a service (may be null). */
    public String gremlinYaml(String graphSpace, String serviceId) {
        return this.metaDriver.get(gremlinYamlKey(graphSpace, serviceId));
    }

    /** Store the gremlin-server YAML of a service and echo it back. */
    public String gremlinYaml(String graphSpace, String serviceId,
                              String yaml) {
        this.metaDriver.put(gremlinYamlKey(graphSpace, serviceId), yaml);
        return yaml;
    }

    /** Watch for REST-properties changes of the given service. */
    public <T> void listenRestPropertiesUpdate(String graphSpace,
                                               String serviceId,
                                               Consumer<T> consumer) {
        this.listen(this.restPropertiesKey(graphSpace, serviceId), consumer);
    }

    /** Watch for gremlin-YAML changes of the given service. */
    public <T> void listenGremlinYamlUpdate(String graphSpace,
                                            String serviceId,
                                            Consumer<T> consumer) {
        this.listen(this.gremlinYamlKey(graphSpace, serviceId), consumer);
    }

    private String restPropertiesKey(String graphSpace, String serviceId) {
        // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE/
        // {serviceId}/REST_PROPERTIES
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_GRAPHSPACE,
                           graphSpace,
                           META_PATH_SERVICE,
                           serviceId,
                           META_PATH_REST_PROPERTIES);
    }

    private String gremlinYamlKey(String graphSpace, String serviceId) {
        // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE/
        // {serviceId}/GREMLIN_YAML
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_GRAPHSPACE,
                           graphSpace,
                           META_PATH_SERVICE,
                           serviceId,
                           META_PATH_GREMLIN_YAML);
    }
}
diff --git
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/GraphMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/GraphMetaManager.java new file mode 100644 index 0000000000..859746bd9b --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/GraphMetaManager.java @@ -0,0 +1,284 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.meta.managers;

import static org.apache.hugegraph.meta.MetaManager.META_PATH_ADD;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_CLEAR;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_EDGE_LABEL;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_EVENT;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPH_CONF;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_JOIN;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_REMOVE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_SCHEMA;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_UPDATE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_VERTEX_LABEL;

import java.util.Map;
import java.util.function.Consumer;

import org.apache.hugegraph.meta.MetaDriver;
import org.apache.hugegraph.type.define.CollectionType;
import org.apache.hugegraph.util.JsonUtil;
import org.apache.hugegraph.util.collection.CollectionFactory;
import org.apache.logging.log4j.util.Strings;

/**
 * Manages per-graph configuration stored in the meta server and the
 * cluster-wide graph lifecycle/cache-clear event keys that nodes watch.
 */
public class GraphMetaManager extends AbstractMetaManager {

    public GraphMetaManager(MetaDriver metaDriver, String cluster) {
        super(metaDriver, cluster);
    }

    /** Compose the event payload "{graphSpace}{META_PATH_JOIN}{name}". */
    private static String graphName(String graphSpace, String name) {
        return String.join(META_PATH_JOIN, graphSpace, name);
    }

    /**
     * Scan every graph config under the graph space.
     *
     * @return map of "{graphSpace}-{name}" to parsed config
     */
    public Map<String, Map<String, Object>> graphConfigs(String graphSpace) {
        Map<String, Map<String, Object>> configs =
                CollectionFactory.newMap(CollectionType.EC);
        Map<String, String> keyValues = this.metaDriver.scanWithPrefix(
                this.graphConfPrefix(graphSpace));
        for (Map.Entry<String, String> entry : keyValues.entrySet()) {
            String key = entry.getKey();
            String[] parts = key.split(META_PATH_DELIMITER);
            String name = parts[parts.length - 1];
            // NOTE(review): joins with a literal "-" while graphName() uses
            // META_PATH_JOIN — confirm both are intended to be the same
            String graphName = String.join("-", graphSpace, name);
            configs.put(graphName, configMap(entry.getValue()));
        }
        return configs;
    }

    /** Delete the stored config of a graph. */
    public void removeGraphConfig(String graphSpace, String graph) {
        this.metaDriver.delete(this.graphConfKey(graphSpace, graph));
    }

    /** Notify listeners that a graph was added. */
    public void notifyGraphAdd(String graphSpace, String graph) {
        this.metaDriver.put(this.graphAddKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners that a graph was removed. */
    public void notifyGraphRemove(String graphSpace, String graph) {
        this.metaDriver.put(this.graphRemoveKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners that a graph's config was updated. */
    public void notifyGraphUpdate(String graphSpace, String graph) {
        this.metaDriver.put(this.graphUpdateKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners that a graph's data was cleared. */
    public void notifyGraphClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners to clear the schema cache of a graph. */
    public void notifySchemaCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.schemaCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners to clear the graph (data) cache of a graph. */
    public void notifyGraphCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners to clear the vertex cache of a graph. */
    public void notifyGraphVertexCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphVertexCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Notify listeners to clear the edge cache of a graph. */
    public void notifyGraphEdgeCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphEdgeCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Read and parse the stored config of a graph. */
    public Map<String, Object> getGraphConfig(String graphSpace, String graph) {
        return configMap(this.metaDriver.get(this.graphConfKey(graphSpace,
                                                               graph)));
    }

    /** Store the config of a newly-added graph. */
    public void addGraphConfig(String graphSpace, String graph,
                               Map<String, Object> configs) {
        this.metaDriver.put(this.graphConfKey(graphSpace, graph),
                            JsonUtil.toJson(configs));
    }

    /** Overwrite the stored config of an existing graph. */
    public void updateGraphConfig(String graphSpace, String graph,
                                  Map<String, Object> configs) {
        this.metaDriver.put(this.graphConfKey(graphSpace, graph),
                            JsonUtil.toJson(configs));
    }

    public <T> void listenGraphAdd(Consumer<T> consumer) {
        this.listen(this.graphAddKey(), consumer);
    }

    public <T> void listenGraphUpdate(Consumer<T> consumer) {
        this.listen(this.graphUpdateKey(), consumer);
    }

    public <T> void listenGraphRemove(Consumer<T> consumer) {
        this.listen(this.graphRemoveKey(), consumer);
    }

    public <T> void listenGraphClear(Consumer<T> consumer) {
        this.listen(this.graphClearKey(), consumer);
    }

    public <T> void listenSchemaCacheClear(Consumer<T> consumer) {
        this.listen(this.schemaCacheClearKey(), consumer);
    }

    public <T> void listenGraphCacheClear(Consumer<T> consumer) {
        this.listen(this.graphCacheClearKey(), consumer);
    }

    public <T> void listenGraphVertexCacheClear(Consumer<T> consumer) {
        this.listen(this.graphVertexCacheClearKey(), consumer);
    }

    public <T> void listenGraphEdgeCacheClear(Consumer<T> consumer) {
        this.listen(this.graphEdgeCacheClearKey(), consumer);
    }

    private String graphConfPrefix(String graphSpace) {
        // Same as graphConfKey() with an empty graph name
        return this.graphConfKey(graphSpace, Strings.EMPTY);
    }

    private String graphConfKey(String graphSpace, String graph) {
        // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH_CONF/{graph}
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_GRAPHSPACE,
                           graphSpace,
                           META_PATH_GRAPH_CONF,
                           graph);
    }

    private String graphAddKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/ADD
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_ADD);
    }

    private String graphRemoveKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/REMOVE
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_REMOVE);
    }

    private String graphUpdateKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/UPDATE
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_UPDATE);
    }

    private String graphClearKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/CLEAR
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_CLEAR);
    }

    private String schemaCacheClearKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/SCHEMA/CLEAR
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_SCHEMA,
                           META_PATH_CLEAR);
    }

    private String graphCacheClearKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/GRAPH/CLEAR
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_GRAPH,
                           META_PATH_CLEAR);
    }

    /** Key that PD listens on for vertex-label cache-clear events. */
    private String graphVertexCacheClearKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/GRAPH/META_PATH_VERTEX_LABEL/CLEAR
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_GRAPH,
                           META_PATH_VERTEX_LABEL,
                           META_PATH_CLEAR);
    }

    /** Key that PD listens on for edge-label cache-clear events. */
    private String graphEdgeCacheClearKey() {
        // HUGEGRAPH/{cluster}/EVENT/GRAPH/GRAPH/META_PATH_EDGE_LABEL/CLEAR
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_GRAPH,
                           META_PATH_EDGE_LABEL,
                           META_PATH_CLEAR);
    }
}
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/KafkaMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/KafkaMetaManager.java new file mode 100644 index 0000000000..67d469aa5a --- /dev/null +++
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/KafkaMetaManager.java @@ -0,0 +1,46 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.meta.managers;

import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_KAFKA;

import java.util.function.Consumer;

import org.apache.hugegraph.meta.MetaDriver;

/**
 * Watches Kafka-related configuration stored under the cluster's
 * HUGEGRAPH/{cluster}/KAFKA prefix in the meta server.
 */
public class KafkaMetaManager extends AbstractMetaManager {

    public KafkaMetaManager(MetaDriver metaDriver, String cluster) {
        super(metaDriver, cluster);
    }

    /** Subscribe to every change under the Kafka config prefix. */
    public <T> void listenKafkaConfig(Consumer<T> consumer) {
        String prefix = this.kafkaPrefixKey();
        this.listenPrefix(prefix, consumer);
    }

    private String kafkaPrefixKey() {
        // HUGEGRAPH/{cluster}/KAFKA
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_KAFKA);
    }
}
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/LockMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/LockMetaManager.java new file
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.meta.managers;

import org.apache.hugegraph.meta.MetaDriver;

/**
 * Manager for distributed-lock related meta operations. It currently adds no
 * behavior of its own; everything is inherited from AbstractMetaManager
 * (presumably lock/unlock helpers live there or in the MetaDriver — confirm
 * against AbstractMetaManager when extending this class).
 */
public class LockMetaManager extends AbstractMetaManager {

    public LockMetaManager(MetaDriver metaDriver, String cluster) {
        super(metaDriver, cluster);
    }
}
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_EDGE_LABEL; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ID; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_INDEX_LABEL; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_NAME; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_PROPERTY_KEY; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SCHEMA; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_VERTEX_LABEL; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.PdMetaDriver; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.JsonUtil; + +public class SchemaMetaManager extends AbstractMetaManager { + private 
final HugeGraph graph; + + public SchemaMetaManager(MetaDriver metaDriver, String cluster, HugeGraph graph) { + super(metaDriver, cluster); + this.graph = graph; + } + + public static void main(String[] args) { + MetaDriver metaDriver = new PdMetaDriver("127.0.0.1:8686"); + SchemaMetaManager schemaMetaManager = new SchemaMetaManager(metaDriver, "hg", null); + PropertyKey propertyKey = new PropertyKey(null, IdGenerator.of(5), "test"); + propertyKey.userdata("key1", "value1"); + propertyKey.userdata("key2", 23); + schemaMetaManager.addPropertyKey("DEFAULT1", "hugegraph", propertyKey); + +// PropertyKey propertyKey1 = schemaMetaManager.getPropertyKey("DEFAULT1", "hugegraph", +// IdGenerator.of(1)); + schemaMetaManager.removePropertyKey("DEFAULT", "hugegraph", IdGenerator.of(1)); + +// propertyKey1 = schemaMetaManager.getPropertyKey("DEFAULT1", "hugegraph", "test"); +// System.out.println(propertyKey1 ); +// +// propertyKey1 = schemaMetaManager.getPropertyKey("DEFAULT1", "hugegraph", "5"); +// System.out.println(propertyKey1 ); + } + + public void addPropertyKey(String graphSpace, String graph, + PropertyKey propertyKey) { + String content = serialize(propertyKey); + this.metaDriver.put(propertyKeyIdKey(graphSpace, graph, + propertyKey.id()), content); + this.metaDriver.put(propertyKeyNameKey(graphSpace, graph, + propertyKey.name()), content); + } + + public void updatePropertyKey(String graphSpace, String graph, + PropertyKey pkey) { + this.addPropertyKey(graphSpace, graph, pkey); + } + + @SuppressWarnings("unchecked") + public PropertyKey getPropertyKey(String graphSpace, String graph, + Id propertyKey) { + String content = this.metaDriver.get(propertyKeyIdKey(graphSpace, graph, + propertyKey)); + if (content == null || content.length() == 0) { + return null; + } else { + return PropertyKey.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public PropertyKey getPropertyKey(String graphSpace, String graph, + 
String propertyKey) { + String content = this.metaDriver.get(propertyKeyNameKey(graphSpace, + graph, + propertyKey)); + if (content == null || content.length() == 0) { + return null; + } else { + return PropertyKey.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getPropertyKeys(String graphSpace, String graph) { + Map propertyKeysKvs = this.metaDriver.scanWithPrefix( + propertyKeyPrefix(graphSpace, graph)); + List propertyKeys = + new ArrayList<>(propertyKeysKvs.size()); + for (String value : propertyKeysKvs.values()) { + propertyKeys.add(PropertyKey.fromMap(JsonUtil.fromJson(value, Map.class), this.graph)); + } + return propertyKeys; + } + + public Id removePropertyKey(String graphSpace, String graph, + Id propertyKey) { + PropertyKey p = this.getPropertyKey(graphSpace, graph, propertyKey); + this.metaDriver.delete(propertyKeyNameKey(graphSpace, graph, + p.name())); + this.metaDriver.delete(propertyKeyIdKey(graphSpace, graph, + propertyKey)); + return IdGenerator.ZERO; + } + + public void addVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + String content = serialize(vertexLabel); + this.metaDriver.put(vertexLabelIdKey(graphSpace, graph, + vertexLabel.id()), content); + this.metaDriver.put(vertexLabelNameKey(graphSpace, graph, + vertexLabel.name()), content); + } + + public void updateVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + this.addVertexLabel(graphSpace, graph, vertexLabel); + } + + @SuppressWarnings("unchecked") + public VertexLabel getVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + String content = this.metaDriver.get(vertexLabelIdKey(graphSpace, graph, + vertexLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return VertexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public VertexLabel getVertexLabel(String 
graphSpace, String graph, + String vertexLabel) { + String content = this.metaDriver.get(vertexLabelNameKey(graphSpace, + graph, + vertexLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return VertexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getVertexLabels(String graphSpace, String graph) { + Map vertexLabelKvs = this.metaDriver.scanWithPrefix( + vertexLabelPrefix(graphSpace, graph)); + List vertexLabels = + new ArrayList<>(vertexLabelKvs.size()); + for (String value : vertexLabelKvs.values()) { + vertexLabels.add(VertexLabel.fromMap( + JsonUtil.fromJson(value, Map.class), this.graph)); + } + return vertexLabels; + } + + public Id removeVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + VertexLabel v = this.getVertexLabel(graphSpace, graph, + vertexLabel); + this.metaDriver.delete(vertexLabelNameKey(graphSpace, graph, + v.name())); + this.metaDriver.delete(vertexLabelIdKey(graphSpace, graph, + vertexLabel)); + return IdGenerator.ZERO; + } + + public void addEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + String content = serialize(edgeLabel); + this.metaDriver.put(edgeLabelIdKey(graphSpace, graph, + edgeLabel.id()), content); + this.metaDriver.put(edgeLabelNameKey(graphSpace, graph, + edgeLabel.name()), content); + } + + public void updateEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + this.addEdgeLabel(graphSpace, graph, edgeLabel); + } + + @SuppressWarnings("unchecked") + public EdgeLabel getEdgeLabel(String graphSpace, String graph, + Id edgeLabel) { + String content = this.metaDriver.get(edgeLabelIdKey(graphSpace, graph, + edgeLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return EdgeLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public EdgeLabel getEdgeLabel(String graphSpace, 
String graph, + String edgeLabel) { + String content = this.metaDriver.get(edgeLabelNameKey(graphSpace, + graph, + edgeLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return EdgeLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getEdgeLabels(String graphSpace, String graph) { + Map edgeLabelKvs = this.metaDriver.scanWithPrefix( + edgeLabelPrefix(graphSpace, graph)); + List edgeLabels = + new ArrayList<>(edgeLabelKvs.size()); + for (String value : edgeLabelKvs.values()) { + edgeLabels.add(EdgeLabel.fromMap( + JsonUtil.fromJson(value, Map.class), this.graph)); + } + return edgeLabels; + } + + public Id removeEdgeLabel(String graphSpace, String graph, + Id edgeLabel) { + EdgeLabel e = this.getEdgeLabel(graphSpace, graph, + edgeLabel); + this.metaDriver.delete(edgeLabelNameKey(graphSpace, graph, + e.name())); + this.metaDriver.delete(edgeLabelIdKey(graphSpace, graph, + edgeLabel)); + return IdGenerator.ZERO; + } + + public void addIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + String content = serialize(indexLabel); + this.metaDriver.put(indexLabelIdKey(graphSpace, graph, + indexLabel.id()), content); + this.metaDriver.put(indexLabelNameKey(graphSpace, graph, + indexLabel.name()), content); + } + + public void updateIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + this.addIndexLabel(graphSpace, graph, indexLabel); + } + + @SuppressWarnings("unchecked") + public IndexLabel getIndexLabel(String graphSpace, String graph, + Id indexLabel) { + String content = this.metaDriver.get(indexLabelIdKey(graphSpace, graph, + indexLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return IndexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public IndexLabel getIndexLabel(String graphSpace, String graph, + String edgeLabel) { + 
String content = this.metaDriver.get(indexLabelNameKey(graphSpace, + graph, + edgeLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return IndexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getIndexLabels(String graphSpace, String graph) { + Map indexLabelKvs = this.metaDriver.scanWithPrefix( + indexLabelPrefix(graphSpace, graph)); + List indexLabels = + new ArrayList<>(indexLabelKvs.size()); + for (String value : indexLabelKvs.values()) { + indexLabels.add(IndexLabel.fromMap( + JsonUtil.fromJson(value, Map.class), this.graph)); + } + return indexLabels; + } + + public Id removeIndexLabel(String graphSpace, String graph, Id indexLabel) { + IndexLabel i = this.getIndexLabel(graphSpace, graph, + indexLabel); + this.metaDriver.delete(indexLabelNameKey(graphSpace, graph, + i.name())); + this.metaDriver.delete(indexLabelIdKey(graphSpace, graph, + indexLabel)); + return IdGenerator.ZERO; + } + + private String propertyKeyPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTY_KEY/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_PROPERTY_KEY, + META_PATH_NAME); + } + + private String propertyKeyIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTY_KEY/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_PROPERTY_KEY, + META_PATH_ID, + id.asString()); + } + + private String propertyKeyNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTY_KEY/NAME/{name} + return String.join(META_PATH_DELIMITER, + 
META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_PROPERTY_KEY, + META_PATH_NAME, + name); + } + + private String vertexLabelPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/VERTEX_LABEL/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_VERTEX_LABEL, + META_PATH_NAME); + } + + private String vertexLabelIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/VERTEX_LABEL/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_VERTEX_LABEL, + META_PATH_ID, + id.asString()); + } + + private String vertexLabelNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/VERTEX_LABEL/NAME/{name} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_VERTEX_LABEL, + META_PATH_NAME, + name); + } + + private String edgeLabelPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTYKEY/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_EDGE_LABEL, + META_PATH_NAME); + } + + private String edgeLabelIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTYKEY/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + 
META_PATH_EDGE_LABEL, + META_PATH_ID, + id.asString()); + } + + private String edgeLabelNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/EDGE_LABEL/NAME/{name} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_EDGE_LABEL, + META_PATH_NAME, + name); + } + + private String indexLabelPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/INDEX_LABEL/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_INDEX_LABEL, + META_PATH_NAME); + } + + private String indexLabelIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/INDEX_LABEL/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_INDEX_LABEL, + META_PATH_ID, + id.asString()); + } + + private String indexLabelNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/INDEX_LABEL/NAME/{name} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_INDEX_LABEL, + META_PATH_NAME, + name); + } + + private String graphNameKey(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph}/SCHEMA + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA); + } + + public void clearAllSchema(String graphSpace, String graph) { + this.metaDriver.deleteWithPrefix(graphNameKey(graphSpace, 
graph)); + } + +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java new file mode 100644 index 0000000000..bc2f1448bc --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SCHEMA_TEMPLATE; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.space.SchemaTemplate; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.logging.log4j.util.Strings; + +public class SchemaTemplateMetaManager extends AbstractMetaManager { + + public SchemaTemplateMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public Set schemaTemplates(String graphSpace) { + Set result = new HashSet<>(); + Map keyValues = this.metaDriver.scanWithPrefix( + this.schemaTemplatePrefix(graphSpace)); + for (String key : keyValues.keySet()) { + String[] parts = key.split(META_PATH_DELIMITER); + result.add(parts[parts.length - 1]); + } + return result; + } + + @SuppressWarnings("unchecked") + public SchemaTemplate schemaTemplate(String graphSpace, + String schemaTemplate) { + String s = this.metaDriver.get(this.schemaTemplateKey(graphSpace, + schemaTemplate)); + if (StringUtils.isEmpty(s)) { + return null; + } + return SchemaTemplate.fromMap(JsonUtil.fromJson(s, Map.class)); + } + + public void addSchemaTemplate(String graphSpace, SchemaTemplate template) { + + String key = this.schemaTemplateKey(graphSpace, template.name()); + + String data = this.metaDriver.get(key); + if (StringUtils.isNotEmpty(data)) { + throw new HugeException("Cannot create schema template " + + "since it has been created"); + } + + this.metaDriver.put(this.schemaTemplateKey(graphSpace, template.name()), + 
JsonUtil.toJson(template.asMap())); + } + + public void updateSchemaTemplate(String graphSpace, + SchemaTemplate template) { + this.metaDriver.put(this.schemaTemplateKey(graphSpace, template.name()), + JsonUtil.toJson(template.asMap())); + } + + public void removeSchemaTemplate(String graphSpace, String name) { + this.metaDriver.delete(this.schemaTemplateKey(graphSpace, name)); + } + + public void clearSchemaTemplate(String graphSpace) { + String prefix = this.schemaTemplatePrefix(graphSpace); + this.metaDriver.deleteWithPrefix(prefix); + } + + private String schemaTemplatePrefix(String graphSpace) { + return this.schemaTemplateKey(graphSpace, Strings.EMPTY); + } + + private String schemaTemplateKey(String graphSpace, String schemaTemplate) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SCHEMA_TEMPLATE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_SCHEMA_TEMPLATE, + schemaTemplate); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java new file mode 100644 index 0000000000..3c03be3433 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ADD; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_EVENT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_JOIN; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_REMOVE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SERVICE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SERVICE_CONF; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_UPDATE; + +import java.util.HashMap; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.space.Service; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.logging.log4j.util.Strings; + +public class ServiceMetaManager extends AbstractMetaManager { + + public ServiceMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + private static String serviceName(String graphSpace, String name) { + return String.join(META_PATH_JOIN, graphSpace, name); + } + + public Map serviceConfigs(String graphSpace) { + Map serviceMap = new HashMap<>(); + Map keyValues = this.metaDriver.scanWithPrefix( + this.serviceConfPrefix(graphSpace)); + 
for (Map.Entry entry : keyValues.entrySet()) { + String key = entry.getKey(); + String[] parts = key.split(META_PATH_DELIMITER); + serviceMap.put(parts[parts.length - 1], + JsonUtil.fromJson(entry.getValue(), Service.class)); + } + return serviceMap; + } + + public String getServiceRawConfig(String graphSpace, String service) { + return this.metaDriver.get(this.serviceConfKey(graphSpace, service)); + } + + public Service getServiceConfig(String graphSpace, String service) { + String s = this.getServiceRawConfig(graphSpace, service); + return this.parseServiceRawConfig(s); + } + + public Service parseServiceRawConfig(String serviceRawConf) { + return JsonUtil.fromJson(serviceRawConf, Service.class); + } + + public void notifyServiceAdd(String graphSpace, String name) { + this.metaDriver.put(this.serviceAddKey(), + serviceName(graphSpace, name)); + } + + public void notifyServiceRemove(String graphSpace, String name) { + this.metaDriver.put(this.serviceRemoveKey(), + serviceName(graphSpace, name)); + } + + public void notifyServiceUpdate(String graphSpace, String name) { + this.metaDriver.put(this.serviceUpdateKey(), + serviceName(graphSpace, name)); + } + + public Service service(String graphSpace, String name) { + String service = this.metaDriver.get(this.serviceConfKey(graphSpace, + name)); + if (StringUtils.isEmpty(service)) { + return null; + } + return JsonUtil.fromJson(service, Service.class); + } + + public void addServiceConfig(String graphSpace, Service service) { + this.metaDriver.put(this.serviceConfKey(graphSpace, service.name()), + JsonUtil.toJson(service)); + } + + public void removeServiceConfig(String graphSpace, String service) { + this.metaDriver.delete(this.serviceConfKey(graphSpace, service)); + } + + public void updateServiceConfig(String graphSpace, Service service) { + this.addServiceConfig(graphSpace, service); + } + + public void listenServiceAdd(Consumer consumer) { + this.listen(this.serviceAddKey(), consumer); + } + + public void 
listenServiceRemove(Consumer consumer) { + this.listen(this.serviceRemoveKey(), consumer); + } + + public void listenServiceUpdate(Consumer consumer) { + this.listen(this.serviceUpdateKey(), consumer); + } + + private String serviceConfPrefix(String graphSpace) { + return this.serviceConfKey(graphSpace, Strings.EMPTY); + } + + private String serviceAddKey() { + // HUGEGRAPH/{cluster}/EVENT/SERVICE/ADD + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_SERVICE, + META_PATH_ADD); + } + + private String serviceRemoveKey() { + // HUGEGRAPH/{cluster}/EVENT/SERVICE/REMOVE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_SERVICE, + META_PATH_REMOVE); + } + + private String serviceUpdateKey() { + // HUGEGRAPH/{cluster}/EVENT/SERVICE/UPDATE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_SERVICE, + META_PATH_UPDATE); + } + + private String serviceConfKey(String graphSpace, String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE_CONF + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_SERVICE_CONF, + name); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java new file mode 100644 index 0000000000..d9e1688247 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ADD; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_CONF; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_EVENT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE_LIST; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_REMOVE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_UPDATE; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.space.GraphSpace; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.hugegraph.util.collection.CollectionFactory; + +public class SpaceMetaManager extends AbstractMetaManager { + + public SpaceMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public List listGraphSpace() { + List result = new ArrayList<>(); + Map graphSpaceMap = 
this.metaDriver.scanWithPrefix( + graphSpaceListKey()); + for (Map.Entry item : graphSpaceMap.entrySet()) { + result.add(item.getValue()); + } + + return result; + } + + public Map graphSpaceConfigs() { + Map keyValues = this.metaDriver.scanWithPrefix( + this.graphSpaceConfPrefix()); + Map configs = + CollectionFactory.newMap(CollectionType.EC); + for (Map.Entry entry : keyValues.entrySet()) { + String key = entry.getKey(); + String[] parts = key.split(META_PATH_DELIMITER); + configs.put(parts[parts.length - 1], + JsonUtil.fromJson(entry.getValue(), GraphSpace.class)); + } + return configs; + } + + public GraphSpace graphSpace(String name) { + String space = this.metaDriver.get(this.graphSpaceConfKey(name)); + if (StringUtils.isEmpty(space)) { + return null; + } + return JsonUtil.fromJson(space, GraphSpace.class); + } + + public GraphSpace getGraphSpaceConfig(String graphSpace) { + String gs = this.metaDriver.get(this.graphSpaceConfKey(graphSpace)); + if (StringUtils.isEmpty(gs)) { + return null; + } + return JsonUtil.fromJson(gs, GraphSpace.class); + } + + public void addGraphSpaceConfig(String name, GraphSpace space) { + this.metaDriver.put(this.graphSpaceConfKey(name), + JsonUtil.toJson(space)); + } + + public void removeGraphSpaceConfig(String name) { + this.metaDriver.delete(this.graphSpaceConfKey(name)); + } + + public void updateGraphSpaceConfig(String name, GraphSpace space) { + this.metaDriver.put(this.graphSpaceConfKey(name), + JsonUtil.toJson(space)); + } + + public void appendGraphSpaceList(String name) { + String key = this.graphSpaceListKey(name); + this.metaDriver.put(key, name); + } + + public void clearGraphSpaceList(String name) { + String key = this.graphSpaceListKey(name); + this.metaDriver.delete(key); + } + + public void listenGraphSpaceAdd(Consumer consumer) { + this.listen(this.graphSpaceAddKey(), consumer); + } + + public void listenGraphSpaceRemove(Consumer consumer) { + this.listen(this.graphSpaceRemoveKey(), consumer); + } + + public 
void listenGraphSpaceUpdate(Consumer consumer) { + this.listen(this.graphSpaceUpdateKey(), consumer); + } + + public void notifyGraphSpaceAdd(String graphSpace) { + this.metaDriver.put(this.graphSpaceAddKey(), graphSpace); + } + + public void notifyGraphSpaceRemove(String graphSpace) { + this.metaDriver.put(this.graphSpaceRemoveKey(), graphSpace); + } + + public void notifyGraphSpaceUpdate(String graphSpace) { + this.metaDriver.put(this.graphSpaceUpdateKey(), graphSpace); + } + + private String graphSpaceConfPrefix() { + // HUGEGRAPH/{cluster}/GRAPHSPACE/CONF + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + META_PATH_CONF); + } + + private String graphSpaceAddKey() { + // HUGEGRAPH/{cluster}/EVENT/GRAPHSPACE/ADD + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_GRAPHSPACE, + META_PATH_ADD); + } + + private String graphSpaceRemoveKey() { + // HUGEGRAPH/{cluster}/EVENT/GRAPHSPACE/REMOVE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_GRAPHSPACE, + META_PATH_REMOVE); + } + + private String graphSpaceUpdateKey() { + // HUGEGRAPH/{cluster}/EVENT/GRAPHSPACE/UPDATE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_GRAPHSPACE, + META_PATH_UPDATE); + } + + private String graphSpaceConfKey(String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/CONF/{graphspace} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + META_PATH_CONF, + name); + } + + private String graphSpaceListKey(String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE_LIST/{graphspace} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE_LIST, + name); + } + + private String graphSpaceListKey() { + // HUGEGRAPH/{cluster}/GRAPHSPACE_LIST + return 
String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE_LIST); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java new file mode 100644 index 0000000000..3ab16eced5 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_TASK; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_TASK_LOCK; + +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.lock.LockResult; + +public class TaskMetaManager extends AbstractMetaManager { + + private static final String TASK_STATUS_POSTFIX = "Status"; + private static final String TASK_PROGRESS_POSTFIX = "Progress"; + private static final String TASK_CONTEXT_POSTFIX = "Context"; + private static final String TASK_RETRY_POSTFIX = "Retry"; + + public TaskMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public LockResult tryLockTask(String graphSpace, String graphName, + String taskId) { + String key = taskLockKey(graphSpace, graphName, taskId); + return this.tryLock(key); + } + + public boolean isLockedTask(String graphSpace, String graphName, + String taskId) { + + String key = taskLockKey(graphSpace, graphName, taskId); + // Check whether this task is currently locked + return metaDriver.isLocked(key); + } + + public void unlockTask(String graphSpace, String graphName, + String taskId, LockResult lockResult) { + + String key = taskLockKey(graphSpace, graphName, taskId); + + this.unlock(key, lockResult); + } + + private String taskLockKey(String graphSpace, + String graphName, + String taskId) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/{graphName}/TASK/{id}/TASK_LOCK + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graphName, + META_PATH_TASK, + taskId, + META_PATH_TASK_LOCK); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java index b262ed07fe..b9fac4643a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java @@ -20,15 +20,21 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.type.define.EdgeLabelType; import org.apache.hugegraph.type.define.Frequency; +import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.E; import com.google.common.base.Objects; @@ -41,6 +47,7 @@ public class EdgeLabel extends SchemaLabel { private Id targetLabel = NONE_ID; private Frequency frequency; private List sortKeys; + private EdgeLabelType edgeLabelType; public EdgeLabel(final HugeGraph graph, Id id, String name) { super(graph, id, name); @@ -57,6 +64,10 @@ public Frequency frequency() { return this.frequency; } + public void edgeLabelType(EdgeLabelType type) { + this.edgeLabelType = type; + } + public void frequency(Frequency frequency) { this.frequency = frequency; } @@ -177,4 +188,168 @@ public interface Builder extends SchemaBuilder { Builder userdata(Map userdata); } + + @Override + public Map asMap() { + Map map = new HashMap<>(); + + if (this.sourceLabel() != null && this.sourceLabel() != NONE_ID) { + map.put(P.SOURCE_LABEL, this.sourceLabel().asString()); + } + + if (this.targetLabel() != null && this.targetLabel() != NONE_ID) { + map.put(P.TARGET_LABEL, 
this.targetLabel().asString()); + } + + if (this.properties() != null) { + map.put(P.PROPERTIES, this.properties()); + } + + if (this.nullableKeys() != null) { + map.put(P.NULLABLE_KEYS, this.nullableKeys()); + } + + if (this.indexLabels() != null) { + map.put(P.INDEX_LABELS, this.indexLabels()); + } + + if (this.ttlStartTime() != null) { + map.put(P.TT_START_TIME, this.ttlStartTime().asString()); + } + + if (this.sortKeys() != null) { + map.put(P.SORT_KEYS, this.sortKeys); + } + + //map.put(P.EDGELABEL_TYPE, this.edgeLabelType); + //if (this.fatherId() != null) { + // map.put(P.FATHER_ID, this.fatherId().asString()); + //} + map.put(P.ENABLE_LABEL_INDEX, this.enableLabelIndex()); + map.put(P.TTL, String.valueOf(this.ttl())); + //map.put(P.LINKS, this.links()); + map.put(P.FREQUENCY, this.frequency().toString()); + + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static EdgeLabel fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(EdgeLabel.P.ID)); + String name = (String) map.get(EdgeLabel.P.NAME); + EdgeLabel edgeLabel = new EdgeLabel(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + edgeLabel.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + edgeLabel.userdata(new Userdata((Map) entry.getValue())); + break; + case P.PROPERTIES: + Set ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.properties(ids); + break; + case P.NULLABLE_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.nullableKeys(ids); + break; + case P.INDEX_LABELS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.addIndexLabels(ids.toArray(new Id[0])); + break; + case P.ENABLE_LABEL_INDEX: + boolean enableLabelIndex = (Boolean) 
entry.getValue(); + edgeLabel.enableLabelIndex(enableLabelIndex); + break; + case P.TTL: + long ttl = Long.parseLong((String) entry.getValue()); + edgeLabel.ttl(ttl); + break; + case P.TT_START_TIME: + long ttlStartTime = + Long.parseLong((String) entry.getValue()); + edgeLabel.ttlStartTime(IdGenerator.of(ttlStartTime)); + break; + //case P.LINKS: + // // TODO: serialize and deserialize + // List list = (List) entry.getValue(); + // for (Map m : list) { + // for (Object key : m.keySet()) { + // Id sid = IdGenerator.of(Long.parseLong((String) key)); + // Id tid = IdGenerator.of(Long.parseLong(String.valueOf(m.get(key)))); + // edgeLabel.links(Pair.of(sid, tid)); + // } + // } + // break; + case P.SOURCE_LABEL: + long sourceLabel = + Long.parseLong((String) entry.getValue()); + edgeLabel.sourceLabel(IdGenerator.of(sourceLabel)); + break; + case P.TARGET_LABEL: + long targetLabel = + Long.parseLong((String) entry.getValue()); + edgeLabel.targetLabel(IdGenerator.of(targetLabel)); + break; + //case P.FATHER_ID: + // long fatherId = + // Long.parseLong((String) entry.getValue()); + // edgeLabel.fatherId(IdGenerator.of(fatherId)); + // break; + //case P.EDGELABEL_TYPE: + // EdgeLabelType edgeLabelType = + // EdgeLabelType.valueOf( + // ((String) entry.getValue()).toUpperCase()); + // edgeLabel.edgeLabelType(edgeLabelType); + // break; + case P.FREQUENCY: + Frequency frequency = + Frequency.valueOf(((String) entry.getValue()).toUpperCase()); + edgeLabel.frequency(frequency); + break; + case P.SORT_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.sortKeys(ids.toArray(new Id[0])); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for edge label", + entry.getKey())); + } + } + return edgeLabel; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static 
final String USERDATA = "userdata"; + + public static final String PROPERTIES = "properties"; + public static final String NULLABLE_KEYS = "nullableKeys"; + public static final String INDEX_LABELS = "indexLabels"; + + public static final String ENABLE_LABEL_INDEX = "enableLabelIndex"; + public static final String TTL = "ttl"; + public static final String TT_START_TIME = "ttlStartTime"; + public static final String LINKS = "links"; + public static final String SOURCE_LABEL = "sourceLabel"; + public static final String TARGET_LABEL = "targetLabel"; + public static final String EDGELABEL_TYPE = "edgeLabelType"; + public static final String FATHER_ID = "fatherId"; + public static final String FREQUENCY = "frequency"; + public static final String SORT_KEYS = "sortKeys"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java index 83bde16765..e20a728c2b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java @@ -20,8 +20,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; @@ -29,6 +31,7 @@ import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.IndexType; +import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.E; import com.google.common.base.Objects; @@ -165,6 +168,8 @@ public Object validValue(Object value) { public static IndexLabel label(HugeType type) { switch (type) { + case TASK: + case SERVER: case VERTEX: return VL_IL; case EDGE: @@ -281,4 
+286,74 @@ public interface Builder extends SchemaBuilder { Builder rebuild(boolean rebuild); } + @Override + public Map asMap() { + HashMap map = new HashMap<>(); + map.put(P.BASE_TYPE, this.baseType().name()); + map.put(P.BASE_VALUE, this.baseValue().asString()); + map.put(P.INDEX_TYPE, this.indexType().name()); + map.put(P.INDEX_FIELDS, this.indexFields()); + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static IndexLabel fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(IndexLabel.P.ID)); + String name = (String) map.get(IndexLabel.P.NAME); + + IndexLabel indexLabel = new IndexLabel(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + indexLabel.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + indexLabel.userdata(new Userdata((Map) entry.getValue())); + break; + case P.BASE_TYPE: + HugeType hugeType = + HugeType.valueOf(((String) entry.getValue()).toUpperCase()); + indexLabel.baseType(hugeType); + break; + case P.BASE_VALUE: + long sourceLabel = + Long.parseLong((String) entry.getValue()); + indexLabel.baseValue(IdGenerator.of(sourceLabel)); + break; + case P.INDEX_TYPE: + IndexType indexType = + IndexType.valueOf(((String) entry.getValue()).toUpperCase()); + indexLabel.indexType(indexType); + break; + case P.INDEX_FIELDS: + List ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toList()); + indexLabel.indexFields(ids.toArray(new Id[0])); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for index label", + entry.getKey())); + } + } + return indexLabel; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + + public static final 
String BASE_TYPE = "baseType"; + public static final String BASE_VALUE = "baseValue"; + public static final String INDEX_TYPE = "indexType"; + public static final String INDEX_FIELDS = "indexFields"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java index 00b7968c21..64d5115d80 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; @@ -28,6 +29,7 @@ import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.exception.NotSupportException; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; @@ -35,6 +37,7 @@ import org.apache.hugegraph.type.define.AggregateType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; +import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.type.define.WriteType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.LongEncoding; @@ -412,4 +415,85 @@ public interface Builder extends SchemaBuilder { Builder userdata(Map userdata); } + + @Override + public Map asMap() { + Map map = new HashMap<>(); + + if (this.dataType != null) { + map.put(P.DATA_TYPE, this.dataType.string()); + } + + if (this.cardinality != null) { + map.put(P.CARDINALITY, this.cardinality.string()); + } + + if (this.aggregateType != null) { + map.put(P.AGGREGATE_TYPE, 
this.aggregateType.string()); + } + + if (this.writeType != null) { + map.put(P.WRITE_TYPE, this.writeType.string()); + } + + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static PropertyKey fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(P.ID)); + String name = (String) map.get(P.NAME); + + PropertyKey propertyKey = new PropertyKey(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + propertyKey.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + propertyKey.userdata(new Userdata((Map) entry.getValue())); + break; + case P.AGGREGATE_TYPE: + propertyKey.aggregateType( + AggregateType.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.WRITE_TYPE: + propertyKey.writeType( + WriteType.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.DATA_TYPE: + propertyKey.dataType( + DataType.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.CARDINALITY: + propertyKey.cardinality( + Cardinality.valueOf(((String) entry.getValue()).toUpperCase())); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for property key", + entry.getKey())); + } + } + return propertyKey; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + + public static final String DATA_TYPE = "data_type"; + public static final String CARDINALITY = "cardinality"; + + public static final String AGGREGATE_TYPE = "aggregate_type"; + public static final String WRITE_TYPE = "write_type"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java index 707029e043..966d3eed8a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java @@ -47,6 +47,11 @@ public abstract class SchemaElement implements Nameable, Typeable, protected static final int ILN_IL_ID = -6; protected static final int OLAP_VL_ID = -7; + // OLAP_ID means all of vertex label ids + public static final Id OLAP_ID = IdGenerator.of(-7); + // OLAP means all of vertex label names + public static final String OLAP = "~olap"; + public static final Id NONE_ID = IdGenerator.ZERO; public static final String UNDEF = "~undefined"; @@ -217,4 +222,31 @@ public Id task() { return this.task; } } + + public abstract Map asMap(); + + public Map asMap(Map map) { + E.checkState(this.id != null, + "Property key id can't be null"); + E.checkState(this.name != null, + "Property key name can't be null"); + E.checkState(this.status != null, + "Property status can't be null"); + + map.put(P.ID, this.id); + map.put(P.NAME, this.name); + map.put(P.STATUS, this.status.string()); + map.put(P.USERDATA, this.userdata); + + return map; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java index 8e2260c6e5..7ea46b0d7b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java @@ -21,7 +21,7 @@ import 
java.util.stream.Collectors; import org.apache.hugegraph.HugeGraph; -import org.apache.hugegraph.backend.tx.SchemaTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.schema.builder.EdgeLabelBuilder; import org.apache.hugegraph.schema.builder.IndexLabelBuilder; @@ -33,10 +33,10 @@ public class SchemaManager { - private final SchemaTransaction transaction; + private final ISchemaTransaction transaction; private HugeGraph graph; - public SchemaManager(SchemaTransaction transaction, HugeGraph graph) { + public SchemaManager(ISchemaTransaction transaction, HugeGraph graph) { E.checkNotNull(transaction, "transaction"); E.checkNotNull(graph, "graph"); this.transaction = transaction; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java index 73ca9253e6..3f62df8631 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java @@ -20,8 +20,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; @@ -29,6 +32,7 @@ import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.IdStrategy; +import org.apache.hugegraph.type.define.SchemaStatus; import com.google.common.base.Objects; @@ -133,4 +137,111 @@ public interface Builder extends SchemaBuilder { Builder userdata(Map userdata); } + + @Override + public Map asMap() { + HashMap map = new HashMap(); + + map.put(P.PROPERTIES, 
this.properties()); + + map.put(P.NULLABLE_KEYS, this.nullableKeys()); + + map.put(P.INDEX_LABELS, this.indexLabels()); + + map.put(P.ENABLE_LABEL_INDEX, this.enableLabelIndex()); + + map.put(P.TTL, String.valueOf(this.ttl())); + + map.put(P.TT_START_TIME, this.ttlStartTime().asString()); + + map.put(P.ID_STRATEGY, this.idStrategy().string()); + + map.put(P.PRIMARY_KEYS, this.primaryKeys()); + + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static VertexLabel fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(VertexLabel.P.ID)); + String name = (String) map.get(VertexLabel.P.NAME); + + VertexLabel vertexLabel = new VertexLabel(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + vertexLabel.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + vertexLabel.userdata(new Userdata((Map) entry.getValue())); + break; + case P.PROPERTIES: + Set ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.properties(ids); + break; + case P.NULLABLE_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.nullableKeys(ids); + break; + case P.INDEX_LABELS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.addIndexLabels(ids.toArray(new Id[0])); + break; + case P.ENABLE_LABEL_INDEX: + boolean enableLabelIndex = (Boolean) entry.getValue(); + vertexLabel.enableLabelIndex(enableLabelIndex); + break; + case P.TTL: + long ttl = Long.parseLong((String) entry.getValue()); + vertexLabel.ttl(ttl); + break; + case P.TT_START_TIME: + long ttlStartTime = + Long.parseLong((String) entry.getValue()); + vertexLabel.ttlStartTime(IdGenerator.of(ttlStartTime)); + break; + case P.ID_STRATEGY: + IdStrategy idStrategy = + 
IdStrategy.valueOf(((String) entry.getValue()).toUpperCase()); + vertexLabel.idStrategy(idStrategy); + break; + case P.PRIMARY_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.primaryKeys(ids.toArray(new Id[0])); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for vertex label", + entry.getKey())); + } + } + return vertexLabel; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + + public static final String PROPERTIES = "properties"; + public static final String NULLABLE_KEYS = "nullableKeys"; + public static final String INDEX_LABELS = "indexLabels"; + + public static final String ENABLE_LABEL_INDEX = "enableLabelIndex"; + public static final String TTL = "ttl"; + public static final String TT_START_TIME = "ttlStartTime"; + public static final String ID_STRATEGY = "idStrategy"; + public static final String PRIMARY_KEYS = "primaryKeys"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java index fb0bb331be..fc01f0afe6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java @@ -23,6 +23,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.schema.EdgeLabel; @@ -38,10 +39,10 @@ 
public abstract class AbstractBuilder { - private final SchemaTransaction transaction; + private final ISchemaTransaction transaction; private final HugeGraph graph; - public AbstractBuilder(SchemaTransaction transaction, HugeGraph graph) { + public AbstractBuilder(ISchemaTransaction transaction, HugeGraph graph) { E.checkNotNull(transaction, "transaction"); E.checkNotNull(graph, "graph"); this.transaction = transaction; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java index 606695ebfd..f7aa460e1e 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java @@ -29,6 +29,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.exception.NotAllowException; @@ -62,7 +63,7 @@ public class EdgeLabelBuilder extends AbstractBuilder private Userdata userdata; private boolean checkExist; - public EdgeLabelBuilder(SchemaTransaction transaction, + public EdgeLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -81,7 +82,7 @@ public EdgeLabelBuilder(SchemaTransaction transaction, this.checkExist = true; } - public EdgeLabelBuilder(SchemaTransaction transaction, + public EdgeLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, EdgeLabel copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java index b84559760b..2ca3534a99 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java @@ -29,6 +29,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.exception.ExistedException; @@ -64,7 +65,7 @@ public class IndexLabelBuilder extends AbstractBuilder private boolean checkExist; private boolean rebuild; - public IndexLabelBuilder(SchemaTransaction transaction, + public IndexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -79,7 +80,7 @@ public IndexLabelBuilder(SchemaTransaction transaction, this.rebuild = true; } - public IndexLabelBuilder(SchemaTransaction transaction, + public IndexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, IndexLabel copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java index 825ab4dd53..6ce6ddd847 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java @@ -24,6 +24,7 @@ import 
org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.exception.ExistedException; @@ -53,7 +54,7 @@ public class PropertyKeyBuilder extends AbstractBuilder private boolean checkExist; private Userdata userdata; - public PropertyKeyBuilder(SchemaTransaction transaction, + public PropertyKeyBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -67,7 +68,7 @@ public PropertyKeyBuilder(SchemaTransaction transaction, this.checkExist = true; } - public PropertyKeyBuilder(SchemaTransaction transaction, + public PropertyKeyBuilder(ISchemaTransaction transaction, HugeGraph graph, PropertyKey copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java index db73c5491e..dfd56565d4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java @@ -29,6 +29,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.exception.ExistedException; import org.apache.hugegraph.exception.NotAllowException; @@ -59,7 +60,7 @@ public class VertexLabelBuilder extends AbstractBuilder private Userdata userdata; private boolean checkExist; 
- public VertexLabelBuilder(SchemaTransaction transaction, + public VertexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -76,7 +77,7 @@ public VertexLabelBuilder(SchemaTransaction transaction, this.checkExist = true; } - public VertexLabelBuilder(SchemaTransaction transaction, + public VertexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, VertexLabel copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java new file mode 100644 index 0000000000..047bd65f53 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java @@ -0,0 +1,512 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
package org.apache.hugegraph.space;

import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.hugegraph.HugeException;
import org.apache.hugegraph.util.E;

/**
 * Resource container for a group of graphs.
 *
 * <p>Tracks quota limits (cpu, memory, storage, graph/role counts), current
 * usage, the k8s namespaces the space is deployed into, and free-form config
 * entries. Cpu/memory accounting is synchronized; graph/role counting is not
 * (callers are expected to serialize those updates externally).
 */
public class GraphSpace {

    public static final String DEFAULT_GRAPH_SPACE_SERVICE_NAME = "DEFAULT";
    public static final String DEFAULT_NICKNAME = "默认图空间";
    public static final String DEFAULT_GRAPH_SPACE_DESCRIPTION =
            "The system default graph space";
    public static final String DEFAULT_CREATOR_NAME = "anonymous";

    public static final int DEFAULT_CPU_LIMIT = 4;
    public static final int DEFAULT_MEMORY_LIMIT = 8;
    public static final int DEFAULT_STORAGE_LIMIT = 100;

    public static final int DEFAULT_MAX_GRAPH_NUMBER = 100;
    public static final int DEFAULT_MAX_ROLE_NUMBER = 100;

    private final String creator;
    // NOTE: kept public for compatibility with existing callers
    public int storageLimit; // GB
    public String oltpNamespace;
    private String name;
    private String nickname;
    private String description;
    private int cpuLimit;
    private int memoryLimit; // GB
    private int computeCpuLimit;
    private int computeMemoryLimit; // GB
    private String olapNamespace;
    private String storageNamespace;
    private int maxGraphNumber;
    private int maxRoleNumber;
    // primitive boolean: accessors already take/return boolean, avoids NPE on unboxing
    private boolean auth;
    private Map<String, Object> configs;
    private int cpuUsed;
    private int memoryUsed; // GB
    private int storageUsed; // GB
    private int graphNumberUsed;
    private int roleNumberUsed;
    private String operatorImagePath = ""; // path of compute operator image
    private String internalAlgorithmImageUrl = "";
    // NOTE(review): not initialized by the minimal constructor — presumably
    // filled in later via createTime(Date); confirm callers before changing.
    private Date createTime;
    private Date updateTime;

    /**
     * Creates a graph space with all limits set to their defaults and an
     * anonymous creator.
     *
     * @param name unique space name, must be non-empty
     */
    public GraphSpace(String name) {
        checkName(name);
        this.name = name;
        this.nickname = DEFAULT_NICKNAME;

        this.maxGraphNumber = DEFAULT_MAX_GRAPH_NUMBER;
        this.maxRoleNumber = DEFAULT_MAX_ROLE_NUMBER;

        this.cpuLimit = DEFAULT_CPU_LIMIT;
        this.memoryLimit = DEFAULT_MEMORY_LIMIT;
        this.storageLimit = DEFAULT_STORAGE_LIMIT;

        this.computeCpuLimit = DEFAULT_CPU_LIMIT;
        this.computeMemoryLimit = DEFAULT_MEMORY_LIMIT;

        this.auth = false;
        this.creator = DEFAULT_CREATOR_NAME;
        this.configs = new HashMap<>();
    }

    /**
     * Creates a graph space with explicit limits; create/update time are set
     * to "now".
     */
    public GraphSpace(String name, String nickname, String description,
                      int cpuLimit,
                      int memoryLimit, int storageLimit, int maxGraphNumber,
                      int maxRoleNumber, boolean auth, String creator,
                      Map<String, Object> config) {
        checkName(name);
        checkLimits(cpuLimit, memoryLimit, storageLimit, maxGraphNumber);
        this.name = name;
        this.nickname = nickname;
        this.description = description;
        this.cpuLimit = cpuLimit;
        this.memoryLimit = memoryLimit;
        this.storageLimit = storageLimit;
        this.maxGraphNumber = maxGraphNumber;
        this.maxRoleNumber = maxRoleNumber;

        this.auth = auth;
        this.configs = config == null ? new HashMap<>() : config;

        this.createTime = new Date();
        this.updateTime = this.createTime;
        this.creator = creator;
    }

    /**
     * Full rehydration constructor (e.g. when loading from the meta store);
     * create/update time are NOT set here and must be restored by the caller.
     */
    public GraphSpace(String name, String nickname, String description,
                      int cpuLimit,
                      int memoryLimit, int storageLimit, int maxGraphNumber,
                      int maxRoleNumber, String oltpNamespace,
                      String olapNamespace, String storageNamespace,
                      int cpuUsed, int memoryUsed, int storageUsed,
                      int graphNumberUsed, int roleNumberUsed,
                      boolean auth, String creator, Map<String, Object> config) {
        checkName(name);
        checkLimits(cpuLimit, memoryLimit, storageLimit, maxGraphNumber);
        this.name = name;
        this.nickname = nickname;
        this.description = description;

        this.cpuLimit = cpuLimit;
        this.memoryLimit = memoryLimit;
        this.storageLimit = storageLimit;

        this.maxGraphNumber = maxGraphNumber;
        this.maxRoleNumber = maxRoleNumber;

        this.oltpNamespace = oltpNamespace;
        this.olapNamespace = olapNamespace;
        this.storageNamespace = storageNamespace;

        this.cpuUsed = cpuUsed;
        this.memoryUsed = memoryUsed;
        this.storageUsed = storageUsed;

        this.graphNumberUsed = graphNumberUsed;
        this.roleNumberUsed = roleNumberUsed;

        this.auth = auth;
        this.creator = creator;

        this.configs = config == null ? new HashMap<>() : config;
    }

    // Shared argument validation for all constructors
    private static void checkName(String name) {
        E.checkArgument(StringUtils.isNotEmpty(name),
                        "The name of graph space can't be null or empty");
    }

    // Shared limit validation for the full constructors
    private static void checkLimits(int cpuLimit, int memoryLimit,
                                    int storageLimit, int maxGraphNumber) {
        E.checkArgument(cpuLimit > 0, "The cpu limit must > 0");
        E.checkArgument(memoryLimit > 0, "The memory limit must > 0");
        E.checkArgument(storageLimit > 0, "The storage limit must > 0");
        E.checkArgument(maxGraphNumber > 0, "The max graph number must > 0");
    }

    public String name() {
        return this.name;
    }

    public void name(String name) {
        this.name = name;
    }

    public String nickname() {
        return this.nickname;
    }

    public void nickname(String nickname) {
        this.nickname = nickname;
    }

    public String description() {
        return this.description;
    }

    public void description(String description) {
        this.description = description;
    }

    public int cpuLimit() {
        return this.cpuLimit;
    }

    public void cpuLimit(int cpuLimit) {
        E.checkArgument(cpuLimit > 0,
                        "The cpu limit must be > 0, but got: %s", cpuLimit);
        this.cpuLimit = cpuLimit;
    }

    public int memoryLimit() {
        return this.memoryLimit;
    }

    public void memoryLimit(int memoryLimit) {
        E.checkArgument(memoryLimit > 0,
                        "The memory limit must be > 0, but got: %s",
                        memoryLimit);
        this.memoryLimit = memoryLimit;
    }

    public int storageLimit() {
        return this.storageLimit;
    }

    public void storageLimit(int storageLimit) {
        E.checkArgument(storageLimit > 0,
                        "The storage limit must be > 0, but got: %s",
                        storageLimit);
        this.storageLimit = storageLimit;
    }

    public void setStorageUsed(int storageUsed) {
        this.storageUsed = storageUsed;
    }

    public int computeCpuLimit() {
        return this.computeCpuLimit;
    }

    public void computeCpuLimit(int computeCpuLimit) {
        E.checkArgument(computeCpuLimit >= 0,
                        "The compute cpu limit must be >= 0, but got: %s",
                        computeCpuLimit);
        this.computeCpuLimit = computeCpuLimit;
    }

    public int computeMemoryLimit() {
        return this.computeMemoryLimit;
    }

    public void computeMemoryLimit(int computeMemoryLimit) {
        E.checkArgument(computeMemoryLimit >= 0,
                        "The compute memory limit must be >= 0, but got: %s",
                        computeMemoryLimit);
        this.computeMemoryLimit = computeMemoryLimit;
    }

    public String oltpNamespace() {
        return this.oltpNamespace;
    }

    public void oltpNamespace(String oltpNamespace) {
        this.oltpNamespace = oltpNamespace;
    }

    public String olapNamespace() {
        return this.olapNamespace;
    }

    public void olapNamespace(String olapNamespace) {
        this.olapNamespace = olapNamespace;
    }

    public String storageNamespace() {
        return this.storageNamespace;
    }

    public void storageNamespace(String storageNamespace) {
        this.storageNamespace = storageNamespace;
    }

    public int maxGraphNumber() {
        return this.maxGraphNumber;
    }

    public void maxGraphNumber(int maxGraphNumber) {
        this.maxGraphNumber = maxGraphNumber;
    }

    public int maxRoleNumber() {
        return this.maxRoleNumber;
    }

    public void maxRoleNumber(int maxRoleNumber) {
        this.maxRoleNumber = maxRoleNumber;
    }

    public int graphNumberUsed() {
        return this.graphNumberUsed;
    }

    public void graphNumberUsed(int graphNumberUsed) {
        this.graphNumberUsed = graphNumberUsed;
    }

    public int roleNumberUsed() {
        return this.roleNumberUsed;
    }

    public void roleNumberUsed(int roleNumberUsed) {
        this.roleNumberUsed = roleNumberUsed;
    }

    public boolean auth() {
        return this.auth;
    }

    public void auth(boolean auth) {
        this.auth = auth;
    }

    public Map<String, Object> configs() {
        return this.configs;
    }

    /** Merges the given entries into the existing configs (does not replace). */
    public void configs(Map<String, Object> configs) {
        this.configs.putAll(configs);
    }

    public void operatorImagePath(String path) {
        this.operatorImagePath = path;
    }

    public String operatorImagePath() {
        return this.operatorImagePath;
    }

    /** Blank/null urls are ignored so the field never becomes empty again. */
    public void internalAlgorithmImageUrl(String url) {
        if (StringUtils.isNotBlank(url)) {
            this.internalAlgorithmImageUrl = url;
        }
    }

    public String internalAlgorithmImageUrl() {
        return this.internalAlgorithmImageUrl;
    }

    public Date createTime() {
        return this.createTime;
    }

    public Date updateTime() {
        return this.updateTime;
    }

    public String creator() {
        return this.creator;
    }

    public void updateTime(Date update) {
        this.updateTime = update;
    }

    public void createTime(Date create) {
        this.createTime = create;
    }

    public void refreshUpdate() {
        this.updateTime = new Date();
    }

    /**
     * Serializes this space into an ordered map for API responses.
     *
     * @return map of limits, usage, namespaces, configs and timestamps
     */
    public Map<String, Object> info() {
        Map<String, Object> infos = new LinkedHashMap<>();
        infos.put("name", this.name);
        infos.put("nickname", this.nickname);
        infos.put("description", this.description);

        infos.put("cpu_limit", this.cpuLimit);
        infos.put("memory_limit", this.memoryLimit);
        infos.put("storage_limit", this.storageLimit);

        infos.put("compute_cpu_limit", this.computeCpuLimit);
        infos.put("compute_memory_limit", this.computeMemoryLimit);

        infos.put("oltp_namespace", this.oltpNamespace);
        infos.put("olap_namespace", this.olapNamespace);
        infos.put("storage_namespace", this.storageNamespace);

        infos.put("max_graph_number", this.maxGraphNumber);
        infos.put("max_role_number", this.maxRoleNumber);

        infos.putAll(this.configs);
        // sources used info is not automatically updated, it could be
        // updated by pdClient of GraphManager
        infos.put("cpu_used", this.cpuUsed);
        infos.put("memory_used", this.memoryUsed);
        infos.put("storage_used", this.storageUsed);
        // ratio rounded to 2 decimals (storageLimit is validated > 0)
        float storageUserPercent = Float.parseFloat(
                String.format("%.2f", (float) this.storageUsed /
                                      this.storageLimit));
        infos.put("storage_percent", storageUserPercent);
        infos.put("graph_number_used", this.graphNumberUsed);
        infos.put("role_number_used", this.roleNumberUsed);

        infos.put("auth", this.auth);

        infos.put("operator_image_path", this.operatorImagePath);
        infos.put("internal_algorithm_image_url", this.internalAlgorithmImageUrl);

        infos.put("create_time", this.createTime);
        infos.put("update_time", this.updateTime);
        infos.put("creator", this.creator);
        return infos;
    }

    private synchronized void incrCpuUsed(int acquiredCount) {
        if (acquiredCount < 0) {
            throw new HugeException("cannot increase cpu used since acquired count is negative");
        }
        this.cpuUsed += acquiredCount;
    }

    // Clamps at 0 rather than going negative when over-released
    private synchronized void decrCpuUsed(int releasedCount) {
        if (releasedCount < 0) {
            throw new HugeException("cannot decrease cpu used since released count is negative");
        }
        if (this.cpuUsed < releasedCount) {
            this.cpuUsed = 0;
        } else {
            this.cpuUsed -= releasedCount;
        }
    }

    private synchronized void incrMemoryUsed(int acquiredCount) {
        if (acquiredCount < 0) {
            throw new HugeException("cannot increase memory used since acquired count is negative");
        }
        this.memoryUsed += acquiredCount;
    }

    // Clamps at 0 rather than going negative when over-released
    private synchronized void decrMemoryUsed(int releasedCount) {
        if (releasedCount < 0) {
            throw new HugeException("cannot decrease memory used since released count is negative");
        }
        if (this.memoryUsed < releasedCount) {
            this.memoryUsed = 0;
        } else {
            this.memoryUsed -= releasedCount;
        }
    }

    /**
     * Only limit the resource usage for oltp service under k8s
     *
     * @param service the service requesting cpu/memory
     * @return true if the request fits within the remaining quota
     */
    public boolean tryOfferResourceFor(Service service) {
        if (!service.k8s()) {
            return true;
        }
        int count = service.count();
        int leftCpu = this.cpuLimit - this.cpuUsed;
        int leftMemory = this.memoryLimit - this.memoryUsed;
        int acquiredCpu = service.cpuLimit() * count;
        int acquiredMemory = service.memoryLimit() * count;
        if (acquiredCpu > leftCpu || acquiredMemory > leftMemory) {
            return false;
        }
        this.incrCpuUsed(acquiredCpu);
        this.incrMemoryUsed(acquiredMemory);
        return true;
    }

    /** Returns the cpu/memory previously acquired for the given service. */
    public void recycleResourceFor(Service service) {
        int count = service.count();
        this.decrCpuUsed(service.cpuLimit() * count);
        this.decrMemoryUsed(service.memoryLimit() * count);
    }

    public boolean tryOfferGraph() {
        return this.tryOfferGraph(1);
    }

    public boolean tryOfferGraph(int count) {
        if (this.graphNumberUsed + count > this.maxGraphNumber) {
            return false;
        }
        this.graphNumberUsed += count;
        return true;
    }

    public void recycleGraph() {
        this.recycleGraph(1);
    }

    public void recycleGraph(int count) {
        this.graphNumberUsed -= count;
    }

    public boolean tryOfferRole() {
        return this.tryOfferRole(1);
    }

    public boolean tryOfferRole(int count) {
        if (this.roleNumberUsed + count > this.maxRoleNumber) {
            return false;
        }
        this.roleNumberUsed += count;
        return true;
    }

    public void recycleRole() {
        this.recycleRole(1);
    }

    public void recycleRole(int count) {
        this.roleNumberUsed -= count;
    }
}
package org.apache.hugegraph.space;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;

import org.apache.hugegraph.util.E;

import com.google.common.collect.ImmutableMap;

/**
 * A named, reusable schema definition (groovy/gremlin schema text) with
 * creation/update timestamps and a creator, serializable to/from a string map.
 */
public class SchemaTemplate {

    // SimpleDateFormat is NOT thread-safe, so the formatter is final and all
    // access goes through the synchronized parseDate/formatDate helpers.
    public static final SimpleDateFormat FORMATTER =
            new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    protected Date createTime;
    protected Date updateTime;
    protected String creator;
    private final String name;
    private String schema;

    /**
     * Creates a template stamped with the current time; creator is unset.
     *
     * @param name   non-empty template name
     * @param schema non-empty schema text
     */
    public SchemaTemplate(String name, String schema) {
        E.checkArgument(name != null && !name.isEmpty(),
                        "The name of schema template can't be null or empty");
        E.checkArgument(schema != null && !schema.isEmpty(),
                        "The schema template can't be null or empty");
        this.name = name;
        this.schema = schema;
        this.createTime = new Date();
        this.updateTime = this.createTime;
    }

    /** Rehydration constructor; updateTime starts equal to the create time. */
    public SchemaTemplate(String name, String schema, Date create, String creator) {
        E.checkArgument(name != null && !name.isEmpty(),
                        "The name of schema template can't be null or empty");
        E.checkArgument(schema != null && !schema.isEmpty(),
                        "The schema template can't be null or empty");
        this.name = name;
        this.schema = schema;
        this.createTime = create;
        this.updateTime = create;

        this.creator = creator;
    }

    /**
     * Rebuilds a template from the map produced by {@link #asMap()}.
     *
     * <p>Best-effort: returns {@code null} when a date fails to parse
     * (callers rely on the null result, so the exception is not rethrown).
     */
    public static SchemaTemplate fromMap(Map<String, String> map) {
        try {
            SchemaTemplate template = new SchemaTemplate(map.get("name"),
                                                         map.get("schema"),
                                                         parseDate(map.get("create")),
                                                         map.get("creator"));
            template.updateTime(parseDate(map.get("update")));
            return template;
        } catch (ParseException e) {
            // TODO(review): replace with proper logging; kept to preserve
            // the original best-effort behavior of returning null
            e.printStackTrace();
        }
        return null;
    }

    // Serialize access to the shared (non-thread-safe) SimpleDateFormat
    private static synchronized Date parseDate(String text) throws ParseException {
        return FORMATTER.parse(text);
    }

    // Serialize access to the shared (non-thread-safe) SimpleDateFormat
    private static synchronized String formatDate(Date date) {
        return FORMATTER.format(date);
    }

    public String name() {
        return this.name;
    }

    public String schema() {
        return this.schema;
    }

    public void schema(String schema) {
        this.schema = schema;
    }

    public Date create() {
        return this.createTime;
    }

    public Date createTime() {
        return this.createTime;
    }

    public Date update() {
        return this.updateTime;
    }

    public Date updateTime() {
        return this.updateTime;
    }

    public void create(Date create) {
        this.createTime = create;
    }

    public String creator() {
        return this.creator;
    }

    public void creator(String creator) {
        this.creator = creator;
    }

    public void updateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public void refreshUpdateTime() {
        this.updateTime = new Date();
    }

    /**
     * Serializes this template to an immutable string map; the inverse of
     * {@link #fromMap(Map)}.
     *
     * <p>NOTE(review): ImmutableMap is null-hostile — this throws if
     * {@code creator} was never set; confirm all write paths set it.
     */
    public Map<String, String> asMap() {
        String createStr = formatDate(this.createTime);
        String updateStr = formatDate(this.updateTime);
        return new ImmutableMap.Builder<String, String>()
                .put("name", this.name)
                .put("schema", this.schema)
                .put("create", createStr)
                .put("create_time", createStr)
                .put("update", updateStr)
                .put("update_time", updateStr)
                .put("creator", this.creator)
                .build();
    }
}
package org.apache.hugegraph.space;

import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.hugegraph.util.E;

/**
 * Deployment descriptor for an OLTP/OLAP/STORAGE service instance inside a
 * graph space: replica counts, per-replica resource limits, routing info and
 * the urls the service is reachable at.
 */
public class Service {

    public static final int DEFAULT_COUNT = 1;
    public static final String DEFAULT_ROUTE_TYPE = "NodePort";
    public static final int DEFAULT_PORT = 0;

    public static final int DEFAULT_CPU_LIMIT = 4;
    public static final int DEFAULT_MEMORY_LIMIT = 8;
    public static final int DEFAULT_STORAGE_LIMIT = 100;

    private final String creator;
    private String name;
    private ServiceType type;
    private DeploymentType deploymentType;
    private String description;
    private Status status;
    private int count;
    private int running;
    private int cpuLimit;
    private int memoryLimit; // GB
    private int storageLimit; // GB
    private String routeType;
    private int port;
    private Set<String> urls = new HashSet<>();
    private Set<String> serverDdsUrls = new HashSet<>();
    private Set<String> serverNodePortUrls = new HashSet<>();
    private String serviceId;
    private String pdServiceId;
    private Date createTime;
    private Date updateTime;

    /**
     * Creates a service with default count/limits/routing; status starts
     * UNKNOWN and timestamps are set to "now".
     */
    public Service(String name, String creator, ServiceType type,
                   DeploymentType deploymentType) {
        E.checkArgument(name != null && !StringUtils.isEmpty(name),
                        "The name of service can't be null or empty");
        E.checkArgumentNotNull(type, "The type of service can't be null");
        E.checkArgumentNotNull(deploymentType,
                               "The deployment type of service can't be null");
        this.name = name;
        this.type = type;
        this.deploymentType = deploymentType;
        this.status = Status.UNKNOWN;
        this.count = DEFAULT_COUNT;
        this.running = 0;
        this.routeType = DEFAULT_ROUTE_TYPE;
        this.port = DEFAULT_PORT;
        this.cpuLimit = DEFAULT_CPU_LIMIT;
        this.memoryLimit = DEFAULT_MEMORY_LIMIT;
        this.storageLimit = DEFAULT_STORAGE_LIMIT;

        this.creator = creator;
        this.createTime = new Date();
        this.updateTime = this.createTime;
    }

    /** Full constructor; status starts UNKNOWN and timestamps are "now". */
    public Service(String name, String creator, String description, ServiceType type,
                   DeploymentType deploymentType, int count, int running,
                   int cpuLimit, int memoryLimit, int storageLimit,
                   String routeType, int port, Set<String> urls) {
        E.checkArgument(name != null && !StringUtils.isEmpty(name),
                        "The name of service can't be null or empty");
        E.checkArgumentNotNull(type, "The type of service can't be null");
        this.name = name;
        this.description = description;
        this.type = type;
        this.status = Status.UNKNOWN;
        this.deploymentType = deploymentType;
        this.count = count;
        this.running = running;
        this.cpuLimit = cpuLimit;
        this.memoryLimit = memoryLimit;
        this.storageLimit = storageLimit;
        this.routeType = routeType;
        this.port = port;
        this.urls = urls;

        this.creator = creator;
        this.createTime = new Date();
        this.updateTime = this.createTime;
    }

    public String name() {
        return this.name;
    }

    public String description() {
        return this.description;
    }

    public void description(String description) {
        this.description = description;
    }

    public ServiceType type() {
        return this.type;
    }

    public void type(ServiceType type) {
        this.type = type;
    }

    public DeploymentType deploymentType() {
        return this.deploymentType;
    }

    public void deploymentType(DeploymentType deploymentType) {
        this.deploymentType = deploymentType;
    }

    public Status status() {
        return this.status;
    }

    public void status(Status status) {
        this.status = status;
    }

    public int count() {
        return this.count;
    }

    public void count(int count) {
        E.checkArgument(count > 0,
                        "The service count must be > 0, but got: %s", count);
        this.count = count;
    }

    public int running() {
        return this.running;
    }

    /** Running replicas can never exceed the configured replica count. */
    public void running(int running) {
        // Message fixed to match the actual check (was "< count" while
        // the condition allows equality)
        E.checkArgument(running <= this.count,
                        "The running count must be <= count %s, but got: %s",
                        this.count, running);
        this.running = running;
    }

    public int cpuLimit() {
        return this.cpuLimit;
    }

    public void cpuLimit(int cpuLimit) {
        E.checkArgument(cpuLimit > 0,
                        "The cpu limit must be > 0, but got: %s", cpuLimit);
        this.cpuLimit = cpuLimit;
    }

    public int memoryLimit() {
        return this.memoryLimit;
    }

    public void memoryLimit(int memoryLimit) {
        E.checkArgument(memoryLimit > 0,
                        "The memory limit must be > 0, but got: %s",
                        memoryLimit);
        this.memoryLimit = memoryLimit;
    }

    public int storageLimit() {
        return this.storageLimit;
    }

    public void storageLimit(int storageLimit) {
        E.checkArgument(storageLimit > 0,
                        "The storage limit must be > 0, but got: %s",
                        storageLimit);
        this.storageLimit = storageLimit;
    }

    public String routeType() {
        return this.routeType;
    }

    public void routeType(String routeType) {
        this.routeType = routeType;
    }

    public int port() {
        return this.port;
    }

    public void port(int port) {
        this.port = port;
    }

    // Lazily replaces a null set so callers always get a usable collection
    public Set<String> urls() {
        if (this.urls == null) {
            this.urls = new HashSet<>();
        }
        return this.urls;
    }

    public void urls(Set<String> urls) {
        this.urls = urls;
    }

    public Set<String> serverDdsUrls() {
        if (this.serverDdsUrls == null) {
            this.serverDdsUrls = new HashSet<>();
        }
        return this.serverDdsUrls;
    }

    public void serverDdsUrls(Set<String> urls) {
        this.serverDdsUrls = urls;
    }

    public Set<String> serverNodePortUrls() {
        if (this.serverNodePortUrls == null) {
            this.serverNodePortUrls = new HashSet<>();
        }
        return this.serverNodePortUrls;
    }

    public void serverNodePortUrls(Set<String> urls) {
        this.serverNodePortUrls = urls;
    }

    /** Adds a single url, initializing the set if needed. */
    public void url(String url) {
        if (this.urls == null) {
            this.urls = new HashSet<>();
        }
        this.urls.add(url);
    }

    public boolean manual() {
        return DeploymentType.MANUAL.equals(this.deploymentType);
    }

    public boolean k8s() {
        return DeploymentType.K8S.equals(this.deploymentType);
    }

    public String creator() {
        return this.creator;
    }

    public Date createdTime() {
        return this.createTime;
    }

    public Date updateTime() {
        return this.updateTime;
    }

    public void createTime(Date create) {
        this.createTime = create;
    }

    public void updateTime(Date update) {
        this.updateTime = update;
    }

    public void refreshUpdate() {
        this.updateTime = new Date();
    }

    /**
     * Two services are "the same" when either is K8S-deployed (k8s manages
     * identity itself), or when name/type/deployment/urls/port all match.
     */
    public boolean sameService(Service other) {
        if (other.deploymentType == DeploymentType.K8S ||
            this.deploymentType == DeploymentType.K8S) {
            return true;
        }
        return (this.name.equals(other.name) &&
                this.type.equals(other.type) &&
                this.deploymentType == other.deploymentType &&
                this.urls.equals(other.urls) &&
                this.port == other.port);
    }

    /** Serializes this service into an ordered map for API responses. */
    public Map<String, Object> info() {
        Map<String, Object> infos = new LinkedHashMap<>();
        infos.put("name", this.name);
        infos.put("type", this.type);
        infos.put("deployment_type", this.deploymentType);
        infos.put("description", this.description);
        infos.put("status", this.status);
        infos.put("count", this.count);
        infos.put("running", this.running);

        infos.put("cpu_limit", this.cpuLimit);
        infos.put("memory_limit", this.memoryLimit);
        infos.put("storage_limit", this.storageLimit);

        infos.put("route_type", this.routeType);
        infos.put("port", this.port);
        infos.put("urls", this.urls);
        infos.put("server_dds_urls", this.serverDdsUrls);
        infos.put("server_node_port_urls", this.serverNodePortUrls);

        infos.put("service_id", this.serviceId);
        infos.put("pd_service_id", this.pdServiceId);

        infos.put("creator", this.creator);
        infos.put("create_time", this.createTime);
        infos.put("update_time", this.updateTime);

        return infos;
    }

    public String serviceId() {
        return this.serviceId;
    }

    public void serviceId(String serviceId) {
        this.serviceId = serviceId;
    }

    public String pdServiceId() {
        return this.pdServiceId;
    }

    public void pdServiceId(String serviceId) {
        this.pdServiceId = serviceId;
    }

    public enum DeploymentType {
        MANUAL,
        K8S,
    }

    public enum ServiceType {
        OLTP,
        OLAP,
        STORAGE
    }

    public enum Status {
        UNKNOWN,
        STARTING,
        RUNNING,
        STOPPED
    }
}
isOutEdge, + EdgeLabel edgeLabel, + String sortValues, + Id otherVertexId) { + Id ownerLabelId = edgeLabel.sourceLabel(); + Id otherLabelId = edgeLabel.targetLabel(); + VertexLabel srcLabel = new VertexLabel(null, ownerLabelId, "UNDEF"); + VertexLabel tgtLabel = new VertexLabel(null, otherLabelId, "UNDEF"); + + VertexLabel otherVertexLabel; + if (isOutEdge) { + ownerVertex.correctVertexLabel(srcLabel); + otherVertexLabel = tgtLabel; + } else { + ownerVertex.correctVertexLabel(tgtLabel); + otherVertexLabel = srcLabel; + } + HugeVertex otherVertex = new HugeVertex(null, otherVertexId, + otherVertexLabel); + ownerVertex.propNotLoaded(); + otherVertex.propNotLoaded(); + + HugeEdge edge = new HugeEdge(null, null, edgeLabel); + edge.name(sortValues); + edge.vertices(isOutEdge, ownerVertex, otherVertex); + edge.assignId(); + + if (isOutEdge) { + ownerVertex.addOutEdge(edge); + otherVertex.addInEdge(edge.switchOwner()); + } else { + ownerVertex.addInEdge(edge); + otherVertex.addOutEdge(edge.switchOwner()); + } + + return edge; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java index 4d43840934..fa442e1298 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java @@ -44,6 +44,7 @@ public class HugeIndex implements GraphType, Cloneable { private Object fieldValues; private IndexLabel indexLabel; private Set elementIds; + private static final int HUGE_TYPE_CODE_LENGTH = 1; public HugeIndex(HugeGraph graph, IndexLabel indexLabel) { E.checkNotNull(graph, "graph"); @@ -210,11 +211,12 @@ public static Id formatIndexId(HugeType type, Id indexLabelId, * index label in front(hugegraph-1317) */ String strIndexLabelId = IdGenerator.asStoredString(indexLabelId); - return 
SplicingIdGenerator.splicing(strIndexLabelId, value); + return SplicingIdGenerator.splicing(type.string(), strIndexLabelId, value); } else { assert type.isRangeIndex(); int length = type.isRange4Index() ? 4 : 8; - BytesBuffer buffer = BytesBuffer.allocate(4 + length); + BytesBuffer buffer = BytesBuffer.allocate(HUGE_TYPE_CODE_LENGTH + 4 + length); + buffer.write(type.code()); buffer.writeInt(SchemaElement.schemaId(indexLabelId)); if (fieldValues != null) { E.checkState(fieldValues instanceof Number, @@ -234,15 +236,16 @@ public static HugeIndex parseIndexId(HugeGraph graph, HugeType type, if (type.isStringIndex()) { Id idObject = IdGenerator.of(id, IdType.STRING); String[] parts = SplicingIdGenerator.parse(idObject); - E.checkState(parts.length == 2, "Invalid secondary index id"); - Id label = IdGenerator.ofStoredString(parts[0], IdType.LONG); + E.checkState(parts.length == 3, "Invalid secondary index id"); + Id label = IdGenerator.ofStoredString(parts[1], IdType.LONG); indexLabel = IndexLabel.label(graph, label); - values = parts[1]; + values = parts[2]; } else { assert type.isRange4Index() || type.isRange8Index(); final int labelLength = 4; E.checkState(id.length > labelLength, "Invalid range index id"); BytesBuffer buffer = BytesBuffer.wrap(id); + buffer.read(HUGE_TYPE_CODE_LENGTH); Id label = IdGenerator.of(buffer.readInt()); indexLabel = IndexLabel.label(graph, label); List fields = indexLabel.indexFields(); @@ -252,7 +255,7 @@ public static HugeIndex parseIndexId(HugeGraph graph, HugeType type, "Invalid range index field type"); Class clazz = dataType.isNumber() ? 
dataType.clazz() : DataType.LONG.clazz(); - values = bytes2number(buffer.read(id.length - labelLength), clazz); + values = bytes2number(buffer.read(id.length - labelLength - HUGE_TYPE_CODE_LENGTH), clazz); } HugeIndex index = new HugeIndex(graph, indexLabel); index.fieldValues(values); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java index aca200700a..4726e88e5b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java @@ -39,10 +39,14 @@ import org.apache.hugegraph.backend.serializer.BytesBuffer; import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.masterelection.StandardClusterRoleStore; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.task.HugeServerInfo; +import org.apache.hugegraph.task.HugeTask; +import org.apache.hugegraph.task.HugeTaskResult; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.CollectionType; @@ -90,6 +94,16 @@ public HugeVertex(final HugeGraph graph, Id id, VertexLabel label) { @Override public HugeType type() { + if (label != null && + (label.name().equals(HugeTask.P.TASK) || + label.name().equals(HugeTaskResult.P.TASKRESULT))) { + return HugeType.TASK; + } + if (label != null && + (label.name().equals(HugeServerInfo.P.SERVER) || + label.name().equals(StandardClusterRoleStore.P.ROLE_DATA))) { + return HugeType.SERVER; + } return HugeType.VERTEX; } diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/DistributedTaskScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/DistributedTaskScheduler.java new file mode 100644 index 0000000000..c29d7a3b82 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/DistributedTaskScheduler.java @@ -0,0 +1,652 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.task; + +import java.util.Iterator; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.exception.ConnectionException; +import org.apache.hugegraph.exception.NotFoundException; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.LockUtil; +import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + +public class DistributedTaskScheduler extends TaskAndResultScheduler { + private final long schedulePeriod; + private static final Logger LOG = Log.logger(DistributedTaskScheduler.class); + private final ExecutorService taskDbExecutor; + private final ExecutorService schemaTaskExecutor; + private final ExecutorService olapTaskExecutor; + private final ExecutorService ephemeralTaskExecutor; + private final ExecutorService gremlinTaskExecutor; + private final ScheduledThreadPoolExecutor schedulerExecutor; + private final ScheduledFuture cronFuture; + + /** + * the status of scheduler + */ + private final AtomicBoolean closed = new AtomicBoolean(true); + + private 
final ConcurrentHashMap> runningTasks = new ConcurrentHashMap<>(); + + public DistributedTaskScheduler(HugeGraphParams graph, + ScheduledThreadPoolExecutor schedulerExecutor, + ExecutorService taskDbExecutor, + ExecutorService schemaTaskExecutor, + ExecutorService olapTaskExecutor, + ExecutorService gremlinTaskExecutor, + ExecutorService ephemeralTaskExecutor, + ExecutorService serverInfoDbExecutor) { + super(graph, serverInfoDbExecutor); + + this.taskDbExecutor = taskDbExecutor; + this.schemaTaskExecutor = schemaTaskExecutor; + this.olapTaskExecutor = olapTaskExecutor; + this.gremlinTaskExecutor = gremlinTaskExecutor; + this.ephemeralTaskExecutor = ephemeralTaskExecutor; + + this.schedulerExecutor = schedulerExecutor; + + this.closed.set(false); + + this.schedulePeriod = this.graph.configuration() + .get(CoreOptions.TASK_SCHEDULE_PERIOD); + + this.cronFuture = this.schedulerExecutor.scheduleWithFixedDelay( + () -> { + // TODO: uncomment later - graph space + // LockUtil.lock(this.graph().spaceGraphName(), LockUtil.GRAPH_LOCK); + LockUtil.lock("", LockUtil.GRAPH_LOCK); + try { + // TODO: 使用超级管理员权限,查询任务 + // TaskManager.useAdmin(); + this.cronSchedule(); + } catch (Throwable t) { + // TODO: log with graph space + LOG.info("cronScheduler exception graph: {}", this.graphName(), t); + } finally { + // TODO: uncomment later - graph space + LockUtil.unlock("", LockUtil.GRAPH_LOCK); + // LockUtil.unlock(this.graph().spaceGraphName(), LockUtil.GRAPH_LOCK); + } + }, + 10L, schedulePeriod, + TimeUnit.SECONDS); + } + + private static boolean sleep(long ms) { + try { + Thread.sleep(ms); + return true; + } catch (InterruptedException ignored) { + // Ignore InterruptedException + return false; + } + } + + public void cronSchedule() { + // 执行周期调度任务 + + if (!this.graph.started() || this.graph.closed()) { + return; + } + + // 处理 NEW 状态的任务 + Iterator> news = queryTaskWithoutResultByStatus( + TaskStatus.NEW); + + while (!this.closed.get() && news.hasNext()) { + HugeTask newTask = 
news.next(); + LOG.info("Try to start task({})@({}/{})", newTask.id(), + this.graphSpace, this.graphName); + if (!tryStartHugeTask(newTask)) { + // 任务提交失败时,线程池已打满 + break; + } + } + + // 处理 RUNNING 状态的任务 + Iterator> runnings = + queryTaskWithoutResultByStatus(TaskStatus.RUNNING); + + while (!this.closed.get() && runnings.hasNext()) { + HugeTask running = runnings.next(); + initTaskParams(running); + if (!isLockedTask(running.id().toString())) { + LOG.info("Try to update task({})@({}/{}) status" + + "(RUNNING->FAILED)", running.id(), this.graphSpace, + this.graphName); + if (updateStatusWithLock(running.id(), TaskStatus.RUNNING, + TaskStatus.FAILED)) { + runningTasks.remove(running.id()); + } else { + LOG.warn("Update task({})@({}/{}) status" + + "(RUNNING->FAILED) failed", + running.id(), this.graphSpace, this.graphName); + } + } + } + + // 处理 FAILED/HANGING 状态的任务 + Iterator> faileds = + queryTaskWithoutResultByStatus(TaskStatus.FAILED); + + while (!this.closed.get() && faileds.hasNext()) { + HugeTask failed = faileds.next(); + initTaskParams(failed); + if (failed.retries() < this.graph().option(CoreOptions.TASK_RETRY)) { + LOG.info("Try to update task({})@({}/{}) status(FAILED->NEW)", + failed.id(), this.graphSpace, this.graphName); + updateStatusWithLock(failed.id(), TaskStatus.FAILED, + TaskStatus.NEW); + } + } + + // 处理 CANCELLING 状态的任务 + Iterator> cancellings = queryTaskWithoutResultByStatus( + TaskStatus.CANCELLING); + + while (!this.closed.get() && cancellings.hasNext()) { + Id cancellingId = cancellings.next().id(); + if (runningTasks.containsKey(cancellingId)) { + HugeTask cancelling = runningTasks.get(cancellingId); + initTaskParams(cancelling); + LOG.info("Try to cancel task({})@({}/{})", + cancelling.id(), this.graphSpace, this.graphName); + cancelling.cancel(true); + + runningTasks.remove(cancellingId); + } else { + // 本地没有执行任务,但是当前任务已经无节点在执行 + if (!isLockedTask(cancellingId.toString())) { + updateStatusWithLock(cancellingId, TaskStatus.CANCELLING, + 
TaskStatus.CANCELLED); + } + } + } + + // 处理 DELETING 状态的任务 + Iterator> deletings = queryTaskWithoutResultByStatus( + TaskStatus.DELETING); + + while (!this.closed.get() && deletings.hasNext()) { + Id deletingId = deletings.next().id(); + if (runningTasks.containsKey(deletingId)) { + HugeTask deleting = runningTasks.get(deletingId); + initTaskParams(deleting); + deleting.cancel(true); + + // 删除存储信息 + deleteFromDB(deletingId); + + runningTasks.remove(deletingId); + } else { + // 本地没有执行任务,但是当前任务已经无节点在执行 + if (!isLockedTask(deletingId.toString())) { + deleteFromDB(deletingId); + } + } + } + } + + protected Iterator> queryTaskWithoutResultByStatus(TaskStatus status) { + if (this.closed.get()) { + return QueryResults.emptyIterator(); + } + return queryTaskWithoutResult(HugeTask.P.STATUS, status.code(), NO_LIMIT, null); + } + + @Override + public HugeGraph graph() { + return this.graph.graph(); + } + + @Override + public int pendingTasks() { + return this.runningTasks.size(); + } + + @Override + public void restoreTasks() { + // DO Nothing! 
+ } + + @Override + public Future schedule(HugeTask task) { + E.checkArgumentNotNull(task, "Task can't be null"); + + initTaskParams(task); + + if (task.ephemeralTask()) { + // 处理 ephemeral 任务,不需要调度,直接执行 + return this.ephemeralTaskExecutor.submit(task); + } + + // 处理 schema 任务 + // 处理 gremlin 任务 + // 处理 olap 计算任务 + // 添加任务到 DB,当前任务状态为 NEW + // TODO: save server id for task + this.save(task); + + if (!this.closed.get()) { + LOG.info("Try to start task({})@({}/{}) immediately", task.id(), + this.graphSpace, this.graphName); + tryStartHugeTask(task); + } else { + LOG.info("TaskScheduler has closed"); + } + + return null; + } + + protected void initTaskParams(HugeTask task) { + // 绑定当前任务执行所需的环境变量 + // 在任务反序列化和执行之前,均需要调用该方法 + task.scheduler(this); + TaskCallable callable = task.callable(); + callable.task(task); + callable.graph(this.graph()); + + if (callable instanceof TaskCallable.SysTaskCallable) { + ((TaskCallable.SysTaskCallable) callable).params(this.graph); + } + } + + @Override + public void cancel(HugeTask task) { + // 更新状态为 CANCELLING + if (!task.completed()) { + // 任务未完成,才可执行状态未 CANCELLING + this.updateStatus(task.id(), null, TaskStatus.CANCELLING); + } else { + LOG.info("cancel task({}) error, task has completed", task.id()); + } + } + + @Override + public void init() { + this.call(() -> this.tx().initSchema()); + } + + protected HugeTask deleteFromDB(Id id) { + // 从 DB 中删除 Task,不检查任务状态 + return this.call(() -> { + Iterator vertices = this.tx().queryTaskInfos(id); + HugeVertex vertex = (HugeVertex) QueryResults.one(vertices); + if (vertex == null) { + return null; + } + HugeTask result = HugeTask.fromVertex(vertex); + this.tx().removeVertex(vertex); + return result; + }); + } + + @Override + public HugeTask delete(Id id, boolean force) { + if (!force) { + // 更改状态为 DELETING,通过自动调度实现删除操作 + this.updateStatus(id, null, TaskStatus.DELETING); + return null; + } else { + return this.deleteFromDB(id); + } + } + + @Override + public boolean close() { + if 
(this.closed.get()) { + return true; + } + + // set closed + this.closed.set(true); + + // cancel all running tasks + for (HugeTask task : this.runningTasks.values()) { + LOG.info("cancel task({}) @({}/{}) when closing scheduler", + task.id(), graphSpace, graphName); + this.cancel(task); + } + + try { + this.waitUntilAllTasksCompleted(10); + } catch (TimeoutException e) { + LOG.warn("Tasks not completed when close distributed task scheduler", e); + } + + // cancel cron thread + if (!cronFuture.isDone() && !cronFuture.isCancelled()) { + cronFuture.cancel(false); + } + + if (!this.taskDbExecutor.isShutdown()) { + this.call(() -> { + try { + this.tx().close(); + } catch (ConnectionException ignored) { + // ConnectionException means no connection established + } + this.graph.closeTx(); + }); + } + return true; + } + + @Override + public HugeTask waitUntilTaskCompleted(Id id, long seconds) + throws TimeoutException { + return this.waitUntilTaskCompleted(id, seconds, QUERY_INTERVAL); + } + + @Override + public HugeTask waitUntilTaskCompleted(Id id) + throws TimeoutException { + // This method is just used by tests + long timeout = this.graph.configuration() + .get(CoreOptions.TASK_WAIT_TIMEOUT); + return this.waitUntilTaskCompleted(id, timeout, 1L); + } + + private HugeTask waitUntilTaskCompleted(Id id, long seconds, + long intervalMs) + throws TimeoutException { + long passes = seconds * 1000 / intervalMs; + HugeTask task = null; + for (long pass = 0; ; pass++) { + try { + task = this.taskWithoutResult(id); + } catch (NotFoundException e) { + if (task != null && task.completed()) { + assert task.id().asLong() < 0L : task.id(); + sleep(intervalMs); + return task; + } + throw e; + } + if (task.completed()) { + // Wait for task result being set after status is completed + sleep(intervalMs); + // 查询带有结果的任务信息 + task = this.task(id); + return task; + } + if (pass >= passes) { + break; + } + sleep(intervalMs); + } + throw new TimeoutException(String.format( + "Task '%s' was 
not completed in %s seconds", id, seconds)); + } + + @Override + public void waitUntilAllTasksCompleted(long seconds) + throws TimeoutException { + long passes = seconds * 1000 / QUERY_INTERVAL; + int taskSize = 0; + for (long pass = 0; ; pass++) { + taskSize = this.pendingTasks(); + if (taskSize == 0) { + sleep(QUERY_INTERVAL); + return; + } + if (pass >= passes) { + break; + } + sleep(QUERY_INTERVAL); + } + throw new TimeoutException(String.format( + "There are still %s incomplete tasks after %s seconds", + taskSize, seconds)); + + } + + @Override + public void checkRequirement(String op) { + if (!this.serverManager().selfIsMaster()) { + throw new HugeException("Can't %s task on non-master server", op); + } + } + + @Override + public V call(Callable callable) { + return this.call(callable, this.taskDbExecutor); + } + + @Override + public V call(Runnable runnable) { + return this.call(Executors.callable(runnable, null)); + } + + private V call(Callable callable, ExecutorService executor) { + try { + callable = new TaskManager.ContextCallable<>(callable); + return executor.submit(callable).get(); + } catch (Exception e) { + throw new HugeException("Failed to update/query TaskStore for " + + "graph(%s/%s): %s", e, this.graphSpace, + this.graph.name(), e.toString()); + } + } + + protected boolean updateStatus(Id id, TaskStatus prestatus, + TaskStatus status) { + HugeTask task = this.taskWithoutResult(id); + initTaskParams(task); + if (prestatus == null || task.status() == prestatus) { + task.overwriteStatus(status); + // 如果状态更新为 FAILED -> NEW,则增加重试次数 + if (prestatus == TaskStatus.FAILED && status == TaskStatus.NEW) { + task.retry(); + } + this.save(task); + LOG.info("Update task({}) success: pre({}), status({})", + id, prestatus, status); + + return true; + } else { + LOG.warn("Update task({}) status conflict: current({}), " + + "pre({}), status({})", id, task.status(), + prestatus, status); + return false; + } + } + + protected boolean updateStatusWithLock(Id id, 
TaskStatus prestatus, + TaskStatus status) { + + LockResult lockResult = tryLockTask(id.asString()); + + if (lockResult.lockSuccess()) { + try { + return updateStatus(id, prestatus, status); + } finally { + unlockTask(id.asString(), lockResult); + } + } + + return false; + } + + /** + * try to start task; + * + * @param task + * @return true if the task have start + */ + private boolean tryStartHugeTask(HugeTask task) { + // Print Scheduler status + logCurrentState(); + + initTaskParams(task); + + ExecutorService chosenExecutor = gremlinTaskExecutor; + + if (task.computer()) { + chosenExecutor = this.olapTaskExecutor; + } + + // TODO: uncomment later - vermeer job + //if (task.vermeer()) { + // chosenExecutor = this.olapTaskExecutor; + //} + + if (task.gremlinTask()) { + chosenExecutor = this.gremlinTaskExecutor; + } + + if (task.schemaTask()) { + chosenExecutor = schemaTaskExecutor; + } + + ThreadPoolExecutor executor = (ThreadPoolExecutor) chosenExecutor; + if (executor.getActiveCount() < executor.getMaximumPoolSize()) { + TaskRunner runner = new TaskRunner<>(task); + chosenExecutor.submit(runner); + LOG.info("Submit task({})@({}/{})", task.id(), + this.graphSpace, this.graphName); + + return true; + } + + return false; + } + + protected void logCurrentState() { + int gremlinActive = + ((ThreadPoolExecutor) gremlinTaskExecutor).getActiveCount(); + int schemaActive = + ((ThreadPoolExecutor) schemaTaskExecutor).getActiveCount(); + int ephemeralActive = + ((ThreadPoolExecutor) ephemeralTaskExecutor).getActiveCount(); + int olapActive = + ((ThreadPoolExecutor) olapTaskExecutor).getActiveCount(); + + LOG.info("Current State: gremlinTaskExecutor({}), schemaTaskExecutor" + + "({}), ephemeralTaskExecutor({}), olapTaskExecutor({})", + gremlinActive, schemaActive, ephemeralActive, olapActive); + } + + private LockResult tryLockTask(String taskId) { + + LockResult lockResult = new LockResult(); + + try { + lockResult = + MetaManager.instance().tryLockTask(graphSpace, 
graphName, + taskId); + } catch (Throwable t) { + LOG.warn(String.format("try to lock task(%s) error", taskId), t); + } + + return lockResult; + } + + private void unlockTask(String taskId, LockResult lockResult) { + + try { + MetaManager.instance().unlockTask(graphSpace, graphName, taskId, + lockResult); + } catch (Throwable t) { + LOG.warn(String.format("try to unlock task(%s) error", + taskId), t); + } + } + + private boolean isLockedTask(String taskId) { + return MetaManager.instance().isLockedTask(graphSpace, + graphName, taskId); + } + + private class TaskRunner implements Runnable { + + private final HugeTask task; + + public TaskRunner(HugeTask task) { + this.task = task; + } + + @Override + public void run() { + LockResult lockResult = tryLockTask(task.id().asString()); + + initTaskParams(task); + if (lockResult.lockSuccess() && !task.completed()) { + + LOG.info("Start task({})", task.id()); + + TaskManager.setContext(task.context()); + try { + // 1. start task can be from schedule() & cronSchedule() + // 2. 
recheck the status of task, in case one same task + // called by both methods at same time; + HugeTask queryTask = task(this.task.id()); + if (queryTask != null && + !TaskStatus.NEW.equals(queryTask.status())) { + return; + } + + runningTasks.put(task.id(), task); + + // 任务执行不会抛出异常,HugeTask 在执行过程中,会捕获异常,并存储到 DB 中 + task.run(); + } catch (Throwable t) { + LOG.warn("exception when execute task", t); + } finally { + runningTasks.remove(task.id()); + unlockTask(task.id().asString(), lockResult); + + LOG.info("task({}) finished.", task.id().toString()); + } + } + } + } + + @Override + public String graphName() { + return this.graph.name(); + } + + @Override + public void taskDone(HugeTask task) { + // DO Nothing + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java index d716a0f00b..bfd79f6f22 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTask.java @@ -39,6 +39,8 @@ import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.job.ComputerJob; import org.apache.hugegraph.job.EphemeralJob; +import org.apache.hugegraph.job.GremlinJob; +import org.apache.hugegraph.job.schema.SchemaJob; import org.apache.hugegraph.type.define.SerialEnum; import org.apache.hugegraph.util.Blob; import org.apache.hugegraph.util.E; @@ -219,6 +221,10 @@ public String result() { return this.result; } + public synchronized void result(HugeTaskResult result) { + this.result = result.result(); + } + private synchronized boolean result(TaskStatus status, String result) { checkPropertySize(result, P.RESULT); if (this.status(status)) { @@ -264,6 +270,18 @@ public boolean computer() { return ComputerJob.COMPUTER.equals(this.type); } + public boolean schemaTask() { + return this.callable instanceof SchemaJob; + 
} + + public boolean gremlinTask() { + return this.callable instanceof GremlinJob; + } + + public boolean ephemeralTask() { + return this.callable instanceof EphemeralJob; + } + @Override public String toString() { return String.format("HugeTask(%s)%s", this.id, this.asMap()); @@ -345,9 +363,7 @@ protected void done() { } catch (Throwable e) { LOG.error("An exception occurred when calling done()", e); } finally { - StandardTaskScheduler scheduler = (StandardTaskScheduler) - this.scheduler(); - scheduler.taskDone(this); + this.scheduler().taskDone(this); } } @@ -427,6 +443,10 @@ protected synchronized boolean status(TaskStatus status) { return false; } + public synchronized void overwriteStatus(TaskStatus status) { + this.status = status; + } + protected void property(String key, Object value) { E.checkNotNull(key, "property key"); switch (key) { @@ -559,6 +579,75 @@ protected synchronized Object[] asArray() { return list.toArray(); } + protected synchronized Object[] asArrayWithoutResult() { + E.checkState(this.type != null, "Task type can't be null"); + E.checkState(this.name != null, "Task name can't be null"); + + List list = new ArrayList<>(28); + + list.add(T.label); + list.add(P.TASK); + + list.add(T.id); + list.add(this.id); + + list.add(P.TYPE); + list.add(this.type); + + list.add(P.NAME); + list.add(this.name); + + list.add(P.CALLABLE); + list.add(this.callable.getClass().getName()); + + list.add(P.STATUS); + list.add(this.status.code()); + + list.add(P.PROGRESS); + list.add(this.progress); + + list.add(P.CREATE); + list.add(this.create); + + list.add(P.RETRIES); + list.add(this.retries); + + if (this.description != null) { + list.add(P.DESCRIPTION); + list.add(this.description); + } + + if (this.context != null) { + list.add(P.CONTEXT); + list.add(this.context); + } + + if (this.update != null) { + list.add(P.UPDATE); + list.add(this.update); + } + + if (this.dependencies != null) { + list.add(P.DEPENDENCIES); + 
list.add(this.dependencies.stream().map(Id::asLong) + .collect(toOrderSet())); + } + + if (this.input != null) { + byte[] bytes = StringEncoding.compress(this.input); + checkPropertySize(bytes.length, P.INPUT); + list.add(P.INPUT); + list.add(bytes); + } + + if (this.server != null) { + list.add(P.SERVER); + list.add(this.server.asString()); + } + + return list.toArray(); + } + public Map asMap() { return this.asMap(true); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTaskResult.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTaskResult.java new file mode 100644 index 0000000000..24fc186cd9 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/HugeTaskResult.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.task; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.SchemaManager; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.type.define.Cardinality; +import org.apache.hugegraph.type.define.DataType; +import org.apache.hugegraph.util.Blob; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Log; +import org.apache.hugegraph.util.StringEncoding; +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.slf4j.Logger; + +public class HugeTaskResult { + private static final Logger LOG = Log.logger(HugeTaskResult.class); + private static final float DECOMPRESS_RATIO = 10.0F; + private final String taskResultId; + private volatile String result; + + public HugeTaskResult(String taskId) { + this.taskResultId = taskId; + this.result = null; + } + + public static String genId(Id taskId) { + return String.format("task_result_%d", taskId.asLong()); + } + + public static HugeTaskResult fromVertex(Vertex vertex) { + Id taskResultId = (Id) vertex.id(); + HugeTaskResult taskResult = new HugeTaskResult(taskResultId.asString()); + for (Iterator> iter = vertex.properties(); iter.hasNext(); ) { + VertexProperty prop = iter.next(); + taskResult.property(prop.key(), prop.value()); + } + return taskResult; + } + + public String taskResultId() { + return this.taskResultId; + } + + public void result(String result) { + this.result = result; + } + + public String result() { + return this.result; + } + + protected synchronized Object[] asArray() { + + List list = new ArrayList<>(6); + + list.add(T.label); + 
list.add(HugeTaskResult.P.TASKRESULT); + + list.add(T.id); + list.add(this.taskResultId); + + if (this.result != null) { + byte[] bytes = StringEncoding.compress(this.result); + list.add(HugeTaskResult.P.RESULT); + list.add(bytes); + } + + return list.toArray(); + } + + protected void property(String key, Object value) { + E.checkNotNull(key, "property key"); + switch (key) { + case P.RESULT: + this.result = StringEncoding.decompress(((Blob) value).bytes(), DECOMPRESS_RATIO); + break; + default: + throw new AssertionError("Unsupported key: " + key); + } + } + + public static final class P { + + public static final String TASKRESULT = Graph.Hidden.hide("taskresult"); + + public static final String RESULT = "~result_result"; + + public static String unhide(String key) { + final String prefix = Graph.Hidden.hide("result_"); + if (key.startsWith(prefix)) { + return key.substring(prefix.length()); + } + return key; + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java index 7290496f42..de0d08b03a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java @@ -346,7 +346,7 @@ private HugeServerInfo selfServerInfo() { private HugeServerInfo serverInfo(Id serverId) { return this.call(() -> { - Iterator vertices = this.tx().queryVertices(serverId); + Iterator vertices = this.tx().queryServerInfos(serverId); Vertex vertex = QueryResults.one(vertices); if (vertex == null) { return null; @@ -374,7 +374,7 @@ private HugeServerInfo removeServerInfo(Id serverId) { } LOG.info("Remove server info: {}", serverId); return this.call(() -> { - Iterator vertices = this.tx().queryVertices(serverId); + Iterator vertices = this.tx().queryServerInfos(serverId); Vertex vertex = 
QueryResults.one(vertices); if (vertex == null) { return null; @@ -409,7 +409,12 @@ protected Iterator serverInfos(long limit, String page) { private Iterator serverInfos(Map conditions, long limit, String page) { return this.call(() -> { - ConditionQuery query = new ConditionQuery(HugeType.VERTEX); + ConditionQuery query; + if (this.graph.backendStoreFeatures().supportsTaskAndServerVertex()) { + query = new ConditionQuery(HugeType.SERVER); + } else { + query = new ConditionQuery(HugeType.VERTEX); + } if (page != null) { query.page(page); } @@ -425,7 +430,7 @@ private Iterator serverInfos(Map conditions, if (limit != NO_LIMIT) { query.limit(limit); } - Iterator vertices = this.tx().queryVertices(query); + Iterator vertices = this.tx().queryServerInfos(query); Iterator servers = new MapperIterator<>(vertices, HugeServerInfo::fromVertex); // Convert iterator to list to avoid across thread tx accessed diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java index 48a7508206..8afe11dff2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java @@ -38,28 +38,22 @@ import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.store.BackendStore; -import org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.exception.ConnectionException; import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.iterator.ExtendableIterator; import org.apache.hugegraph.iterator.MapperIterator; import org.apache.hugegraph.job.EphemeralJob; -import org.apache.hugegraph.schema.IndexLabel; import 
org.apache.hugegraph.schema.PropertyKey; -import org.apache.hugegraph.schema.SchemaManager; import org.apache.hugegraph.schema.VertexLabel; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.task.HugeTask.P; import org.apache.hugegraph.task.TaskCallable.SysTaskCallable; import org.apache.hugegraph.task.TaskManager.ContextCallable; import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.define.Cardinality; -import org.apache.hugegraph.type.define.DataType; import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; -import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.slf4j.Logger; @@ -79,11 +73,6 @@ public class StandardTaskScheduler implements TaskScheduler { private volatile TaskTransaction taskTx; - private static final long NO_LIMIT = -1L; - private static final long PAGE_SIZE = 500L; - private static final long QUERY_INTERVAL = 100L; - private static final int MAX_PENDING_TASKS = 10000; - public StandardTaskScheduler(HugeGraphParams graph, ExecutorService taskExecutor, ExecutorService taskDbExecutor, @@ -107,6 +96,7 @@ public HugeGraph graph() { return this.graph.graph(); } + @Override public String graphName() { return this.graph.name(); } @@ -304,7 +294,8 @@ public synchronized void cancel(HugeTask task) { task.id(), task.status()); } - protected ServerInfoManager serverManager() { + @Override + public ServerInfoManager serverManager() { return this.serverManager; } @@ -425,7 +416,8 @@ protected void cancelTasksOnWorker(Id server) { } while (page != null); } - protected void taskDone(HugeTask task) { + @Override + public void taskDone(HugeTask task) { this.remove(task); Id selfServerId = this.serverManager().selfNodeId(); @@ -439,13 +431,17 @@ protected void taskDone(HugeTask task) { } protected void remove(HugeTask task) { + this.remove(task, false); + } + + protected void 
remove(HugeTask task, boolean force) { E.checkNotNull(task, "remove task"); HugeTask delTask = this.tasks.remove(task.id()); if (delTask != null && delTask != task) { LOG.warn("Task '{}' may be inconsistent status {}(expect {})", task.id(), task.status(), delTask.status()); } - assert delTask == null || delTask.completed() || + assert force || delTask == null || delTask.completed() || delTask.cancelling() || delTask.isCancelled() : delTask; } @@ -528,8 +524,8 @@ public Iterator> tasks(TaskStatus status, } public HugeTask findTask(Id id) { - HugeTask result = this.call(() -> { - Iterator vertices = this.tx().queryVertices(id); + HugeTask result = this.call(() -> { + Iterator vertices = this.tx().queryTaskInfos(id); Vertex vertex = QueryResults.one(vertices); if (vertex == null) { return null; @@ -556,7 +552,7 @@ public Iterator> findTask(TaskStatus status, } @Override - public HugeTask delete(Id id) { + public HugeTask delete(Id id, boolean force) { this.checkOnMasterNode("delete"); HugeTask task = this.task(id); @@ -571,21 +567,21 @@ public HugeTask delete(Id id) { * when the database status is inconsistent. 
*/ if (task != null) { - E.checkArgument(task.completed(), + E.checkArgument(force || task.completed(), "Can't delete incomplete task '%s' in status %s" + ", Please try to cancel the task first", id, task.status()); - this.remove(task); + this.remove(task, force); } return this.call(() -> { - Iterator vertices = this.tx().queryVertices(id); + Iterator vertices = this.tx().queryTaskInfos(id); HugeVertex vertex = (HugeVertex) QueryResults.one(vertices); if (vertex == null) { return null; } HugeTask result = HugeTask.fromVertex(vertex); - E.checkState(result.completed(), + E.checkState(force || result.completed(), "Can't delete incomplete task '%s' in status %s", id, result.status()); this.tx().removeVertex(vertex); @@ -672,7 +668,12 @@ private Iterator> queryTask(String key, Object value, private Iterator> queryTask(Map conditions, long limit, String page) { return this.call(() -> { - ConditionQuery query = new ConditionQuery(HugeType.VERTEX); + ConditionQuery query; + if (this.graph.backendStoreFeatures().supportsTaskAndServerVertex()) { + query = new ConditionQuery(HugeType.TASK); + } else { + query = new ConditionQuery(HugeType.VERTEX); + } if (page != null) { query.page(page); } @@ -697,7 +698,7 @@ private Iterator> queryTask(Map conditions, private Iterator> queryTask(List ids) { return this.call(() -> { Object[] idArray = ids.toArray(new Id[0]); - Iterator vertices = this.tx().queryVertices(idArray); + Iterator vertices = this.tx().queryTaskInfos(idArray); Iterator> tasks = new MapperIterator<>(vertices, HugeTask::fromVertex); // Convert iterator to list to avoid across thread tx accessed @@ -705,11 +706,13 @@ private Iterator> queryTask(List ids) { }); } - private V call(Runnable runnable) { + @Override + public V call(Runnable runnable) { return this.call(Executors.callable(runnable, null)); } - private V call(Callable callable) { + @Override + public V call(Callable callable) { assert !Thread.currentThread().getName().startsWith( "task-db-worker") : "can't 
call by itself"; try { @@ -742,129 +745,4 @@ private static boolean sleep(long ms) { return false; } } - - private static class TaskTransaction extends GraphTransaction { - - public static final String TASK = P.TASK; - - public TaskTransaction(HugeGraphParams graph, BackendStore store) { - super(graph, store); - this.autoCommit(true); - } - - public HugeVertex constructVertex(HugeTask task) { - if (!this.graph().existsVertexLabel(TASK)) { - throw new HugeException("Schema is missing for task(%s) '%s'", - task.id(), task.name()); - } - return this.constructVertex(false, task.asArray()); - } - - public void deleteIndex(HugeVertex vertex) { - // Delete the old record if exist - Iterator old = this.queryVertices(vertex.id()); - HugeVertex oldV = (HugeVertex) QueryResults.one(old); - if (oldV == null) { - return; - } - this.deleteIndexIfNeeded(oldV, vertex); - } - - private boolean deleteIndexIfNeeded(HugeVertex oldV, HugeVertex newV) { - if (!oldV.value(P.STATUS).equals(newV.value(P.STATUS))) { - // Only delete vertex if index value changed else override it - this.updateIndex(this.indexLabel(P.STATUS).id(), oldV, true); - return true; - } - return false; - } - - public void initSchema() { - if (this.existVertexLabel(TASK)) { - return; - } - - HugeGraph graph = this.graph(); - String[] properties = this.initProperties(); - - // Create vertex label '~task' - VertexLabel label = graph.schema().vertexLabel(TASK) - .properties(properties) - .useCustomizeNumberId() - .nullableKeys(P.DESCRIPTION, P.CONTEXT, - P.UPDATE, P.INPUT, P.RESULT, - P.DEPENDENCIES, P.SERVER) - .enableLabelIndex(true) - .build(); - this.params().schemaTransaction().addVertexLabel(label); - - // Create index - this.createIndexLabel(label, P.STATUS); - } - - private boolean existVertexLabel(String label) { - return this.params().schemaTransaction() - .getVertexLabel(label) != null; - } - - private String[] initProperties() { - List props = new ArrayList<>(); - - props.add(createPropertyKey(P.TYPE)); - 
props.add(createPropertyKey(P.NAME)); - props.add(createPropertyKey(P.CALLABLE)); - props.add(createPropertyKey(P.DESCRIPTION)); - props.add(createPropertyKey(P.CONTEXT)); - props.add(createPropertyKey(P.STATUS, DataType.BYTE)); - props.add(createPropertyKey(P.PROGRESS, DataType.INT)); - props.add(createPropertyKey(P.CREATE, DataType.DATE)); - props.add(createPropertyKey(P.UPDATE, DataType.DATE)); - props.add(createPropertyKey(P.RETRIES, DataType.INT)); - props.add(createPropertyKey(P.INPUT, DataType.BLOB)); - props.add(createPropertyKey(P.RESULT, DataType.BLOB)); - props.add(createPropertyKey(P.DEPENDENCIES, DataType.LONG, - Cardinality.SET)); - props.add(createPropertyKey(P.SERVER)); - - return props.toArray(new String[0]); - } - - private String createPropertyKey(String name) { - return this.createPropertyKey(name, DataType.TEXT); - } - - private String createPropertyKey(String name, DataType dataType) { - return this.createPropertyKey(name, dataType, Cardinality.SINGLE); - } - - private String createPropertyKey(String name, DataType dataType, - Cardinality cardinality) { - HugeGraph graph = this.graph(); - SchemaManager schema = graph.schema(); - PropertyKey propertyKey = schema.propertyKey(name) - .dataType(dataType) - .cardinality(cardinality) - .build(); - this.params().schemaTransaction().addPropertyKey(propertyKey); - return name; - } - - private IndexLabel createIndexLabel(VertexLabel label, String field) { - HugeGraph graph = this.graph(); - SchemaManager schema = graph.schema(); - String name = Hidden.hide("task-index-by-" + field); - IndexLabel indexLabel = schema.indexLabel(name) - .on(HugeType.VERTEX_LABEL, TASK) - .by(field) - .build(); - this.params().schemaTransaction().addIndexLabel(label, indexLabel); - return indexLabel; - } - - private IndexLabel indexLabel(String field) { - String name = Hidden.hide("task-index-by-" + field); - HugeGraph graph = this.graph(); - return graph.indexLabel(name); - } - } } diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultScheduler.java new file mode 100644 index 0000000000..f076f6c466 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultScheduler.java @@ -0,0 +1,335 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.task; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; + +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.Condition; +import org.apache.hugegraph.backend.query.ConditionQuery; +import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.backend.store.BackendStore; +import org.apache.hugegraph.exception.NotFoundException; +import org.apache.hugegraph.iterator.ListIterator; +import org.apache.hugegraph.iterator.MapperIterator; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +import com.google.common.collect.ImmutableMap; + +/** + * Base class of task & result scheduler + */ +public abstract class TaskAndResultScheduler implements TaskScheduler { + /** + * Which graph the scheduler belongs to + */ + protected final HugeGraphParams graph; + protected final String graphSpace; + protected final String graphName; + + /** + * Task transactions, for persistence + */ + protected volatile TaskAndResultTransaction taskTx = null; + + private final ServerInfoManager serverManager; + + public TaskAndResultScheduler( + HugeGraphParams graph, + ExecutorService serverInfoDbExecutor) { + E.checkNotNull(graph, "graph"); + + this.graph = graph; + // TODO: uncomment later - graph space + // this.graphSpace = graph.graph().graphSpace(); + this.graphSpace = ""; + this.graphName = graph.name(); + + this.serverManager = new ServerInfoManager(graph, serverInfoDbExecutor); + } + + @Override + public void save(HugeTask task) { + E.checkArgumentNotNull(task, "Task 
can't be null"); + String rawResult = task.result(); + + // Save task without result; + this.call(() -> { + // Construct vertex from task + HugeVertex vertex = this.tx().constructTaskVertex(task); + // Delete index of old vertex to avoid stale index + this.tx().deleteIndex(vertex); + // Add or update task info to backend store + return this.tx().addVertex(vertex); + }); + + // 保存 result 结果 + if (rawResult != null) { + HugeTaskResult result = + new HugeTaskResult(HugeTaskResult.genId(task.id())); + result.result(rawResult); + + this.call(() -> { + // Construct vertex from task + HugeVertex vertex = this.tx().constructTaskResultVertex(result); + // Add or update task info to backend store + return this.tx().addVertex(vertex); + }); + } + } + + @Override + public HugeTask task(Id id) { + HugeTask task = this.call(() -> { + Iterator vertices = this.tx().queryTaskInfos(id); + Vertex vertex = QueryResults.one(vertices); + if (vertex == null) { + return null; + } + return HugeTask.fromVertex(vertex); + }); + + if (task == null) { + throw new NotFoundException("Can't find task with id '%s'", id); + } + + HugeTaskResult taskResult = queryTaskResult(id); + if (taskResult != null) { + task.result(taskResult); + } + + return task; + } + + @Override + public Iterator> tasks(List ids) { + return this.tasksWithoutResult(ids); + } + + @Override + public Iterator> tasks(TaskStatus status, long limit, + String page) { + if (status == null) { + return this.queryTaskWithoutResult(ImmutableMap.of(), limit, page); + } + return this.queryTaskWithoutResult(HugeTask.P.STATUS, status.code(), + limit, page); + } + + protected Iterator> queryTask(String key, Object value, + long limit, String page) { + return this.queryTask(ImmutableMap.of(key, value), limit, page); + } + + protected Iterator> queryTask(Map conditions, + long limit, String page) { + Iterator> ts = this.call(() -> { + ConditionQuery query = new ConditionQuery(HugeType.TASK); + if (page != null) { + query.page(page); + } + 
VertexLabel vl = this.graph().vertexLabel(HugeTask.P.TASK); + query.eq(HugeKeys.LABEL, vl.id()); + for (Map.Entry entry : conditions.entrySet()) { + PropertyKey pk = this.graph().propertyKey(entry.getKey()); + query.query(Condition.eq(pk.id(), entry.getValue())); + } + query.showHidden(true); + if (limit != NO_LIMIT) { + query.limit(limit); + } + Iterator vertices = this.tx().queryTaskInfos(query); + Iterator> tasks = + new MapperIterator<>(vertices, HugeTask::fromVertex); + // Convert iterator to list to avoid across thread tx accessed + return QueryResults.toList(tasks); + }); + + return new MapperIterator<>(ts, (task) -> { + HugeTaskResult taskResult = queryTaskResult(task.id()); + if (taskResult != null) { + task.result(taskResult); + } + return task; + }); + } + + protected Iterator> queryTask(List ids) { + ListIterator> ts = this.call( + () -> { + Object[] idArray = ids.toArray(new Id[ids.size()]); + Iterator vertices = this.tx() + .queryTaskInfos(idArray); + Iterator> tasks = + new MapperIterator<>(vertices, + HugeTask::fromVertex); + // Convert iterator to list to avoid across thread tx accessed + return QueryResults.toList(tasks); + }); + + Iterator results = queryTaskResult(ids); + + HashMap resultCaches = new HashMap<>(); + while (results.hasNext()) { + HugeTaskResult entry = results.next(); + resultCaches.put(entry.taskResultId(), entry); + } + + return new MapperIterator<>(ts, (task) -> { + HugeTaskResult taskResult = + resultCaches.get(HugeTaskResult.genId(task.id())); + if (taskResult != null) { + task.result(taskResult); + } + return task; + }); + } + + protected HugeTask taskWithoutResult(Id id) { + HugeTask result = this.call(() -> { + Iterator vertices = this.tx().queryTaskInfos(id); + Vertex vertex = QueryResults.one(vertices); + if (vertex == null) { + return null; + } + return HugeTask.fromVertex(vertex); + }); + + return result; + } + + protected Iterator> tasksWithoutResult(List ids) { + return this.call(() -> { + Object[] idArray = 
ids.toArray(new Id[ids.size()]); + Iterator vertices = this.tx().queryTaskInfos(idArray); + Iterator> tasks = + new MapperIterator<>(vertices, HugeTask::fromVertex); + // Convert iterator to list to avoid across thread tx accessed + return QueryResults.toList(tasks); + }); + } + + protected Iterator> tasksWithoutResult(TaskStatus status, + long limit, + String page) { + if (status == null) { + return this.queryTaskWithoutResult(ImmutableMap.of(), limit, page); + } + return this.queryTaskWithoutResult(HugeTask.P.STATUS, status.code(), + limit, page); + } + + protected Iterator> queryTaskWithoutResult(String key, + Object value, + long limit, String page) { + return this.queryTaskWithoutResult(ImmutableMap.of(key, value), limit, page); + } + + protected Iterator> queryTaskWithoutResult(Map conditions, long limit, String page) { + return this.call(() -> { + ConditionQuery query = new ConditionQuery(HugeType.TASK); + if (page != null) { + query.page(page); + } + VertexLabel vl = this.graph().vertexLabel(HugeTask.P.TASK); + query.eq(HugeKeys.LABEL, vl.id()); + for (Map.Entry entry : conditions.entrySet()) { + PropertyKey pk = this.graph().propertyKey(entry.getKey()); + query.query(Condition.eq(pk.id(), entry.getValue())); + } + query.showHidden(true); + if (limit != NO_LIMIT) { + query.limit(limit); + } + Iterator vertices = this.tx().queryTaskInfos(query); + Iterator> tasks = + new MapperIterator<>(vertices, HugeTask::fromVertex); + // Convert iterator to list to avoid across thread tx accessed + return QueryResults.toList(tasks); + }); + } + + protected HugeTaskResult queryTaskResult(Id taskid) { + HugeTaskResult result = this.call(() -> { + Iterator vertices = + this.tx().queryTaskInfos(HugeTaskResult.genId(taskid)); + Vertex vertex = QueryResults.one(vertices); + if (vertex == null) { + return null; + } + + return HugeTaskResult.fromVertex(vertex); + }); + + return result; + } + + protected Iterator queryTaskResult(List taskIds) { + return this.call(() -> { + 
Object[] idArray = + taskIds.stream().map(HugeTaskResult::genId).toArray(); + Iterator vertices = this.tx() + .queryTaskInfos(idArray); + Iterator tasks = + new MapperIterator<>(vertices, + HugeTaskResult::fromVertex); + // Convert iterator to list to avoid across thread tx accessed + return QueryResults.toList(tasks); + }); + } + + protected TaskAndResultTransaction tx() { + // NOTE: only the owner thread can access task tx + if (this.taskTx == null) { + /* + * NOTE: don't synchronized(this) due to scheduler thread hold + * this lock through scheduleTasks(), then query tasks and wait + * for db-worker thread after call(), the tx may not be initialized + * but can't catch this lock, then cause deadlock. + * We just use this.serverManager as a monitor here + */ + synchronized (this.serverManager) { + if (this.taskTx == null) { + BackendStore store = this.graph.loadSystemStore(); + TaskAndResultTransaction tx = new TaskAndResultTransaction(this.graph, store); + assert this.taskTx == null; // may be reentrant? + this.taskTx = tx; + } + } + } + assert this.taskTx != null; + return this.taskTx; + } + + @Override + public ServerInfoManager serverManager() { + return this.serverManager; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultTransaction.java new file mode 100644 index 0000000000..c39ae23615 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskAndResultTransaction.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.task; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.store.BackendStore; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.SchemaManager; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.type.define.Cardinality; +import org.apache.hugegraph.type.define.DataType; + +public class TaskAndResultTransaction extends TaskTransaction { + + public static final String TASKRESULT = HugeTaskResult.P.TASKRESULT; + + /** + * Task transactions, for persistence + */ + protected volatile TaskAndResultTransaction taskTx = null; + + public TaskAndResultTransaction(HugeGraphParams graph, BackendStore store) { + super(graph, store); + this.autoCommit(true); + } + + public HugeVertex constructTaskVertex(HugeTask task) { + if (!this.graph().existsVertexLabel(TASK)) { + throw new HugeException("Schema is missing for task(%s) '%s'", + task.id(), task.name()); + } + + return this.constructVertex(false, task.asArrayWithoutResult()); + } + + public HugeVertex constructTaskResultVertex(HugeTaskResult taskResult) { + if (!this.graph().existsVertexLabel(TASKRESULT)) { + throw new HugeException("Schema is missing for task result"); + } + + return this.constructVertex(false, taskResult.asArray()); + } + + @Override + public void initSchema() { + 
super.initSchema(); + + if (this.graph().existsVertexLabel(TASKRESULT)) { + return; + } + + HugeGraph graph = this.graph(); + String[] properties = this.initTaskResultProperties(); + + // Create vertex label '~taskresult' + VertexLabel label = + graph.schema().vertexLabel(HugeTaskResult.P.TASKRESULT).properties(properties) + .nullableKeys(HugeTaskResult.P.RESULT) + .useCustomizeStringId().enableLabelIndex(true).build(); + + graph.addVertexLabel(label); + } + + private String[] initTaskResultProperties() { + List props = new ArrayList<>(); + props.add(createPropertyKey(HugeTaskResult.P.RESULT, DataType.BLOB)); + + return props.toArray(new String[0]); + } + + private String createPropertyKey(String name, DataType dataType) { + return createPropertyKey(name, dataType, Cardinality.SINGLE); + } + + private String createPropertyKey(String name, DataType dataType, Cardinality cardinality) { + SchemaManager schema = this.graph().schema(); + PropertyKey propertyKey = + schema.propertyKey(name).dataType(dataType).cardinality(cardinality).build(); + this.graph().addPropertyKey(propertyKey); + return name; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java index 144387949b..afc5c9d49b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java @@ -48,6 +48,11 @@ public final class TaskManager { "server-info-db-worker-%d"; public static final String TASK_SCHEDULER = "task-scheduler-%d"; + public static final String OLAP_TASK_WORKER = "olap-task-worker-%d"; + public static final String SCHEMA_TASK_WORKER = "schema-task-worker-%d"; + public static final String EPHEMERAL_TASK_WORKER = "ephemeral-task-worker-%d"; + public static final String DISTRIBUTED_TASK_SCHEDULER = "distributed-scheduler-%d"; + 
protected static final long SCHEDULE_PERIOD = 1000L; // unit ms private static final long TX_CLOSE_TIMEOUT = 30L; // unit s private static final int THREADS = 4; @@ -60,6 +65,11 @@ public final class TaskManager { private final ExecutorService serverInfoDbExecutor; private final PausableScheduledThreadPool schedulerExecutor; + private final ExecutorService schemaTaskExecutor; + private final ExecutorService olapTaskExecutor; + private final ExecutorService ephemeralTaskExecutor; + private final PausableScheduledThreadPool distributedSchedulerExecutor; + private boolean enableRoleElected = false; public static TaskManager instance() { @@ -76,6 +86,17 @@ private TaskManager(int pool) { 1, TASK_DB_WORKER); this.serverInfoDbExecutor = ExecutorUtil.newFixedThreadPool( 1, SERVER_INFO_DB_WORKER); + + this.schemaTaskExecutor = ExecutorUtil.newFixedThreadPool(pool, + SCHEMA_TASK_WORKER); + this.olapTaskExecutor = ExecutorUtil.newFixedThreadPool(pool, + OLAP_TASK_WORKER); + this.ephemeralTaskExecutor = ExecutorUtil.newFixedThreadPool(pool, + EPHEMERAL_TASK_WORKER); + this.distributedSchedulerExecutor = + ExecutorUtil.newPausableScheduledThreadPool(1, + DISTRIBUTED_TASK_SCHEDULER); + // For schedule task to run, just one thread is ok this.schedulerExecutor = ExecutorUtil.newPausableScheduledThreadPool( 1, TASK_SCHEDULER); @@ -88,11 +109,36 @@ private TaskManager(int pool) { public void addScheduler(HugeGraphParams graph) { E.checkArgumentNotNull(graph, "The graph can't be null"); - - TaskScheduler scheduler = new StandardTaskScheduler(graph, - this.taskExecutor, this.taskDbExecutor, - this.serverInfoDbExecutor); - this.schedulers.put(graph, scheduler); + LOG.info("Use {} as the scheduler of graph ({})", + graph.schedulerType(), graph.name()); + // TODO: 如当前服务绑定到指定的非 DEFAULT 图空间,非当前图空间的图不再创建任务调度器 (graph space) + switch (graph.schedulerType()) { + case "distributed": { + TaskScheduler scheduler = + new DistributedTaskScheduler( + graph, + distributedSchedulerExecutor, + 
taskDbExecutor, + schemaTaskExecutor, + olapTaskExecutor, + taskExecutor, /* gremlinTaskExecutor */ + ephemeralTaskExecutor, + serverInfoDbExecutor); + this.schedulers.put(graph, scheduler); + break; + } + case "local": + default: { + TaskScheduler scheduler = + new StandardTaskScheduler( + graph, + this.taskExecutor, + this.taskDbExecutor, + this.serverInfoDbExecutor); + this.schedulers.put(graph, scheduler); + break; + } + } } public void closeScheduler(HugeGraphParams graph) { @@ -123,6 +169,10 @@ public void closeScheduler(HugeGraphParams graph) { if (!this.schedulerExecutor.isTerminated()) { this.closeSchedulerTx(graph); } + + if (!this.distributedSchedulerExecutor.isTerminated()) { + this.closeDistributedSchedulerTx(graph); + } } private void closeTaskTx(HugeGraphParams graph) { @@ -157,6 +207,21 @@ private void closeSchedulerTx(HugeGraphParams graph) { } } + private void closeDistributedSchedulerTx(HugeGraphParams graph) { + final Callable closeTx = () -> { + // Do close-tx for current thread + graph.closeTx(); + // Let other threads run + Thread.yield(); + return null; + }; + try { + this.distributedSchedulerExecutor.submit(closeTx).get(); + } catch (Exception e) { + throw new HugeException("Exception when closing scheduler tx", e); + } + } + public void pauseScheduledThreadPool() { this.schedulerExecutor.pauseSchedule(); } @@ -170,8 +235,7 @@ public TaskScheduler getScheduler(HugeGraphParams graph) { } public ServerInfoManager getServerInfoManager(HugeGraphParams graph) { - StandardTaskScheduler scheduler = (StandardTaskScheduler) - this.getScheduler(graph); + TaskScheduler scheduler = this.getScheduler(graph); if (scheduler == null) { return null; } @@ -195,10 +259,21 @@ public void shutdown(long timeout) { } } + if (terminated && !this.distributedSchedulerExecutor.isShutdown()) { + this.distributedSchedulerExecutor.shutdown(); + try { + terminated = this.distributedSchedulerExecutor.awaitTermination(timeout, + unit); + } catch (Throwable e) { + ex = e; + 
} + } + if (terminated && !this.taskExecutor.isShutdown()) { this.taskExecutor.shutdown(); try { - terminated = this.taskExecutor.awaitTermination(timeout, unit); + terminated = this.taskExecutor.awaitTermination(timeout, + unit); } catch (Throwable e) { ex = e; } @@ -217,7 +292,38 @@ public void shutdown(long timeout) { if (terminated && !this.taskDbExecutor.isShutdown()) { this.taskDbExecutor.shutdown(); try { - terminated = this.taskDbExecutor.awaitTermination(timeout, unit); + terminated = this.taskDbExecutor.awaitTermination(timeout, + unit); + } catch (Throwable e) { + ex = e; + } + } + + if (terminated && !this.ephemeralTaskExecutor.isShutdown()) { + this.ephemeralTaskExecutor.shutdown(); + try { + terminated = this.ephemeralTaskExecutor.awaitTermination(timeout, + unit); + } catch (Throwable e) { + ex = e; + } + } + + if (terminated && !this.schemaTaskExecutor.isShutdown()) { + this.schemaTaskExecutor.shutdown(); + try { + terminated = this.schemaTaskExecutor.awaitTermination(timeout, + unit); + } catch (Throwable e) { + ex = e; + } + } + + if (terminated && !this.olapTaskExecutor.isShutdown()) { + this.olapTaskExecutor.shutdown(); + try { + terminated = this.olapTaskExecutor.awaitTermination(timeout, + unit); } catch (Throwable e) { ex = e; } @@ -292,7 +398,7 @@ private void scheduleOrExecuteJob() { // Called by scheduler timer try { for (TaskScheduler entry : this.schedulers.values()) { - StandardTaskScheduler scheduler = (StandardTaskScheduler) entry; + TaskScheduler scheduler = entry; // Maybe other thread close&remove scheduler at the same time synchronized (scheduler) { this.scheduleOrExecuteJobForGraph(scheduler); @@ -303,56 +409,59 @@ private void scheduleOrExecuteJob() { } } - private void scheduleOrExecuteJobForGraph(StandardTaskScheduler scheduler) { + private void scheduleOrExecuteJobForGraph(TaskScheduler scheduler) { E.checkNotNull(scheduler, "scheduler"); - ServerInfoManager serverManager = scheduler.serverManager(); - String graph = 
scheduler.graphName(); - - LockUtil.lock(graph, LockUtil.GRAPH_LOCK); - try { - /* - * Skip if: - * graph is closed (iterate schedulers before graph is closing) - * or - * graph is not initialized(maybe truncated or cleared). - * - * If graph is closing by other thread, current thread get - * serverManager and try lock graph, at the same time other - * thread deleted the lock-group, current thread would get - * exception 'LockGroup xx does not exists'. - * If graph is closed, don't call serverManager.initialized() - * due to it will reopen graph tx. - */ - if (!serverManager.graphIsReady()) { - return; - } - - // Update server heartbeat - serverManager.heartbeat(); + if (scheduler instanceof StandardTaskScheduler) { + StandardTaskScheduler standardTaskScheduler = (StandardTaskScheduler) (scheduler); + ServerInfoManager serverManager = scheduler.serverManager(); + String graph = scheduler.graphName(); - /* - * Master will schedule tasks to suitable servers. - * Note a Worker may become to a Master, so elected-Master also needs to - * execute tasks assigned by previous Master when enableRoleElected=true. - * However, when enableRoleElected=false, a Master is only set by the - * config assignment, assigned-Master always stays the same state. - */ - if (serverManager.selfIsMaster()) { - scheduler.scheduleTasksOnMaster(); - if (!this.enableRoleElected && !serverManager.onlySingleNode()) { - // assigned-Master + non-single-node don't need to execute tasks + LockUtil.lock(graph, LockUtil.GRAPH_LOCK); + try { + /* + * Skip if: + * graph is closed (iterate schedulers before graph is closing) + * or + * graph is not initialized(maybe truncated or cleared). + * + * If graph is closing by other thread, current thread get + * serverManager and try lock graph, at the same time other + * thread deleted the lock-group, current thread would get + * exception 'LockGroup xx does not exists'. 
+ * If graph is closed, don't call serverManager.initialized() + * due to it will reopen graph tx. + */ + if (!serverManager.graphIsReady()) { return; } - } - // Execute queued tasks scheduled to current server - scheduler.executeTasksOnWorker(serverManager.selfNodeId()); + // Update server heartbeat + serverManager.heartbeat(); + + /* + * Master will schedule tasks to suitable servers. + * Note a Worker may become to a Master, so elected-Master also needs to + * execute tasks assigned by previous Master when enableRoleElected=true. + * However, when enableRoleElected=false, a Master is only set by the + * config assignment, assigned-Master always stays the same state. + */ + if (serverManager.selfIsMaster()) { + standardTaskScheduler.scheduleTasksOnMaster(); + if (!this.enableRoleElected && !serverManager.onlySingleNode()) { + // assigned-Master + non-single-node don't need to execute tasks + return; + } + } - // Cancel tasks scheduled to current server - scheduler.cancelTasksOnWorker(serverManager.selfNodeId()); - } finally { - LockUtil.unlock(graph, LockUtil.GRAPH_LOCK); + // Execute queued tasks scheduled to current server + standardTaskScheduler.executeTasksOnWorker(serverManager.selfNodeId()); + + // Cancel tasks scheduled to current server + standardTaskScheduler.cancelTasksOnWorker(serverManager.selfNodeId()); + } finally { + LockUtil.unlock(graph, LockUtil.GRAPH_LOCK); + } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java index 28eb232a2f..b72ee91a8d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskScheduler.java @@ -19,6 +19,7 @@ import java.util.Iterator; import java.util.List; +import java.util.concurrent.Callable; import java.util.concurrent.Future; import 
java.util.concurrent.TimeoutException; @@ -27,6 +28,11 @@ public interface TaskScheduler { + long NO_LIMIT = -1L; + long PAGE_SIZE = 500L; + long QUERY_INTERVAL = 100L; + int MAX_PENDING_TASKS = 10000; + HugeGraph graph(); int pendingTasks(); @@ -39,7 +45,7 @@ public interface TaskScheduler { void save(HugeTask task); - HugeTask delete(Id id); + HugeTask delete(Id id, boolean force); HugeTask task(Id id); @@ -62,4 +68,14 @@ void waitUntilAllTasksCompleted(long seconds) throws TimeoutException; void checkRequirement(String op); + + V call(Callable callable); + + V call(Runnable runnable); + + ServerInfoManager serverManager(); + + String graphName(); + + void taskDone(HugeTask task); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java index 14613ee8c6..d87a20c0a5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskStatus.java @@ -38,7 +38,9 @@ public enum TaskStatus implements SerialEnum { SUCCESS(7, "success"), CANCELLING(8, "cancelling"), CANCELLED(9, "cancelled"), - FAILED(10, "failed"); + FAILED(10, "failed"), + HANGING(11, "hanging"), + DELETING(12, "deleting"); // NOTE: order is important(RESTORING > RUNNING > QUEUED) when restoring public static final List PENDING_STATUSES = ImmutableList.of( diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskTransaction.java new file mode 100644 index 0000000000..2b27019c74 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskTransaction.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
 * See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.task;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hugegraph.HugeException;
import org.apache.hugegraph.HugeGraph;
import org.apache.hugegraph.HugeGraphParams;
import org.apache.hugegraph.backend.query.QueryResults;
import org.apache.hugegraph.backend.store.BackendStore;
import org.apache.hugegraph.backend.tx.GraphTransaction;
import org.apache.hugegraph.schema.IndexLabel;
import org.apache.hugegraph.schema.PropertyKey;
import org.apache.hugegraph.schema.SchemaManager;
import org.apache.hugegraph.schema.VertexLabel;
import org.apache.hugegraph.structure.HugeVertex;
import org.apache.hugegraph.type.HugeType;
import org.apache.hugegraph.type.define.Cardinality;
import org.apache.hugegraph.type.define.DataType;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Vertex;

/**
 * Graph transaction dedicated to persisting {@link HugeTask} instances as
 * vertices with the hidden '~task' label, including schema bootstrap
 * ({@link #initSchema()}) and maintenance of the task-status index.
 */
public class TaskTransaction extends GraphTransaction {

    public static final String TASK = HugeTask.P.TASK;

    public TaskTransaction(HugeGraphParams graph, BackendStore store) {
        super(graph, store);
        // Commit each mutation immediately: task persistence must not wait
        // for an explicit caller-side commit
        this.autoCommit(true);
    }

    /**
     * Build the vertex representation of a task (including its result).
     *
     * @throws HugeException if the '~task' vertex label has not been created yet
     */
    public HugeVertex constructVertex(HugeTask task) {
        if (!this.graph().existsVertexLabel(TASK)) {
            throw new HugeException("Schema is missing for task(%s) '%s'",
                                    task.id(), task.name());
        }
        return this.constructVertex(false, task.asArray());
    }

    /**
     * Remove the status-index entry of the previously stored version of
     * {@code vertex}, if an old version exists and its status changed.
     */
    public void deleteIndex(HugeVertex vertex) {
        // Delete the old record if it exists
        Iterator old = this.queryTaskInfos(vertex.id());
        HugeVertex oldV = (HugeVertex) QueryResults.one(old);
        if (oldV == null) {
            return;
        }
        this.deleteIndexIfNeeded(oldV, vertex);
    }

    // Returns true if the old status-index entry was removed
    private boolean deleteIndexIfNeeded(HugeVertex oldV, HugeVertex newV) {
        if (!oldV.value(HugeTask.P.STATUS).equals(newV.value(HugeTask.P.STATUS))) {
            // Only delete vertex if index value changed else override it
            this.updateIndex(this.indexLabel(HugeTask.P.STATUS).id(), oldV, true);
            return true;
        }
        return false;
    }

    /**
     * Create the '~task' vertex label, its property keys and the status
     * index label if they do not exist yet; safe to call repeatedly.
     */
    public void initSchema() {
        if (this.existVertexLabel(TASK)) {
            return;
        }

        HugeGraph graph = this.graph();
        String[] properties = this.initProperties();

        // Create vertex label '~task'
        VertexLabel label = graph.schema().vertexLabel(TASK)
                                 .properties(properties)
                                 .useCustomizeNumberId()
                                 .nullableKeys(HugeTask.P.DESCRIPTION, HugeTask.P.CONTEXT,
                                               HugeTask.P.UPDATE, HugeTask.P.INPUT,
                                               HugeTask.P.RESULT,
                                               HugeTask.P.DEPENDENCIES, HugeTask.P.SERVER)
                                 .enableLabelIndex(true)
                                 .build();
        this.params().schemaTransaction().addVertexLabel(label);

        // Create index
        this.createIndexLabel(label, HugeTask.P.STATUS);
    }

    // Check label existence via the schema transaction (not the graph cache)
    private boolean existVertexLabel(String label) {
        return this.params().schemaTransaction()
                   .getVertexLabel(label) != null;
    }

    // Create all property keys of the '~task' vertex label, returning their names
    private String[] initProperties() {
        List props = new ArrayList<>();

        props.add(createPropertyKey(HugeTask.P.TYPE));
        props.add(createPropertyKey(HugeTask.P.NAME));
        props.add(createPropertyKey(HugeTask.P.CALLABLE));
        props.add(createPropertyKey(HugeTask.P.DESCRIPTION));
        props.add(createPropertyKey(HugeTask.P.CONTEXT));
        props.add(createPropertyKey(HugeTask.P.STATUS, DataType.BYTE));
        props.add(createPropertyKey(HugeTask.P.PROGRESS, DataType.INT));
        props.add(createPropertyKey(HugeTask.P.CREATE, DataType.DATE));
        props.add(createPropertyKey(HugeTask.P.UPDATE, DataType.DATE));
        props.add(createPropertyKey(HugeTask.P.RETRIES, DataType.INT));
        props.add(createPropertyKey(HugeTask.P.INPUT, DataType.BLOB));
        props.add(createPropertyKey(HugeTask.P.RESULT, DataType.BLOB));
        props.add(createPropertyKey(HugeTask.P.DEPENDENCIES, DataType.LONG,
                                    Cardinality.SET));
        props.add(createPropertyKey(HugeTask.P.SERVER));

        return props.toArray(new String[0]);
    }

    // Shortcut for TEXT properties
    private String createPropertyKey(String name) {
        return this.createPropertyKey(name, DataType.TEXT);
    }

    // Shortcut for single-cardinality properties
    private String createPropertyKey(String name, DataType dataType) {
        return this.createPropertyKey(name, dataType, Cardinality.SINGLE);
    }

    // Create and register a property key, returning its name so callers can
    // collect the schema's property-name list
    private String createPropertyKey(String name, DataType dataType,
                                     Cardinality cardinality) {
        HugeGraph graph = this.graph();
        SchemaManager schema = graph.schema();
        PropertyKey propertyKey = schema.propertyKey(name)
                                        .dataType(dataType)
                                        .cardinality(cardinality)
                                        .build();
        this.params().schemaTransaction().addPropertyKey(propertyKey);
        return name;
    }

    // Create the hidden secondary index '~task-index-by-<field>' on the
    // given vertex label
    private IndexLabel createIndexLabel(VertexLabel label, String field) {
        HugeGraph graph = this.graph();
        SchemaManager schema = graph.schema();
        String name = Graph.Hidden.hide("task-index-by-" + field);
        IndexLabel indexLabel = schema.indexLabel(name)
                                      .on(HugeType.VERTEX_LABEL, TASK)
                                      .by(field)
                                      .build();
        this.params().schemaTransaction().addIndexLabel(label, indexLabel);
        return indexLabel;
    }

    // Look up the hidden index label created by createIndexLabel()
    private IndexLabel indexLabel(String field) {
        String name = Graph.Hidden.hide("task-index-by-" + field);
        HugeGraph graph = this.graph();
        return graph.indexLabel(name);
    }
}
0000000000..912ed43d55 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/EdgeLabelType.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.type.define; + +public enum EdgeLabelType implements SerialEnum { + + + NORMAL(1, "NORMAL"), + + PARENT(2, "PARENT"), + + SUB(3, "SUB"), + + GENERAL(4, "GENERAL"), + ; + + static { + SerialEnum.register(EdgeLabelType.class); + } + + private final byte code; + private final String name; + + EdgeLabelType(int code, String name) { + assert code < 256; + this.code = (byte) code; + this.name = name; + } + + @Override + public byte code() { + return this.code; + } + + public String string() { + return this.name; + } + + public boolean normal() { + return this == NORMAL; + } + + public boolean parent() { + return this == PARENT; + } + + public boolean sub() { + return this == SUB; + } + + public boolean general() { + return this == GENERAL; + } + +} diff --git a/hugegraph-server/hugegraph-dist/pom.xml b/hugegraph-server/hugegraph-dist/pom.xml index baba3835c2..ee954741d8 100644 --- a/hugegraph-server/hugegraph-dist/pom.xml +++ b/hugegraph-server/hugegraph-dist/pom.xml @@ -98,6 +98,11 @@ 
hugegraph-postgresql ${revision} + + org.apache.hugegraph + hugegraph-hstore + ${revision} + org.apache.tinkerpop diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties index 5f77efe939..1a3532914b 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties @@ -19,11 +19,21 @@ edge.cache_type=l2 #vertex.default_label=vertex -backend=rocksdb +backend=hstore serializer=binary store=hugegraph +# pd config +pd.peers=127.0.0.1:8686 + +# task config +task.scheduler_type=local +task.schedule_period=10 +task.retry=0 +task.wait_timeout=10 + +# raft config raft.mode=false raft.path=./raft-log raft.safe_read=true @@ -45,6 +55,7 @@ raft.rpc_connect_timeout=5000 raft.rpc_timeout=60 raft.install_snapshot_rpc_timeout=36000 +# search config search.text_analyzer=jieba search.text_analyzer_mode=INDEX diff --git a/hugegraph-server/hugegraph-dist/src/assembly/travis/install-backend.sh b/hugegraph-server/hugegraph-dist/src/assembly/travis/install-backend.sh index d73d0b5be9..2c66b00cf0 100755 --- a/hugegraph-server/hugegraph-dist/src/assembly/travis/install-backend.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/travis/install-backend.sh @@ -47,6 +47,9 @@ case $BACKEND in postgresql) "$TRAVIS_DIR"/install-postgresql-via-docker.sh ;; + hstore) + "$TRAVIS_DIR"/install-hstore.sh + ;; *) # don't need to install for other backends ;; diff --git a/hugegraph-server/hugegraph-dist/src/assembly/travis/install-hstore.sh b/hugegraph-server/hugegraph-dist/src/assembly/travis/install-hstore.sh new file mode 100755 index 0000000000..f1a8373eff --- /dev/null +++ b/hugegraph-server/hugegraph-dist/src/assembly/travis/install-hstore.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# 
contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -ev + +TRAVIS_DIR=$(dirname "$0") + +. "$TRAVIS_DIR"/start-pd.sh +. "$TRAVIS_DIR"/start-store.sh diff --git a/hugegraph-server/hugegraph-dist/src/assembly/travis/start-pd.sh b/hugegraph-server/hugegraph-dist/src/assembly/travis/start-pd.sh index 15fbba3ec1..ca1f11452e 100755 --- a/hugegraph-server/hugegraph-dist/src/assembly/travis/start-pd.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/travis/start-pd.sh @@ -1,19 +1,19 @@ #!/bin/bash # # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with this -# work for additional information regarding copyright ownership. The ASF -# licenses this file to You under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # set -ev diff --git a/hugegraph-server/hugegraph-dist/src/assembly/travis/start-server.sh b/hugegraph-server/hugegraph-dist/src/assembly/travis/start-server.sh index 22af1ae330..b6ce9d12cc 100755 --- a/hugegraph-server/hugegraph-dist/src/assembly/travis/start-server.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/travis/start-server.sh @@ -39,7 +39,8 @@ declare -A backend_serializer_map=(["memory"]="text" \ ["mysql"]="mysql" \ ["hbase"]="hbase" \ ["rocksdb"]="binary" \ - ["postgresql"]="postgresql") + ["postgresql"]="postgresql" \ + ["hstore"]="binary") SERIALIZER=${backend_serializer_map[$BACKEND]} diff --git a/hugegraph-server/hugegraph-dist/src/assembly/travis/start-store.sh b/hugegraph-server/hugegraph-dist/src/assembly/travis/start-store.sh new file mode 100755 index 0000000000..a685147c59 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/src/assembly/travis/start-store.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +set -ev + +HOME_DIR=$(pwd) +STORE_DIR=$HOME_DIR/hugegraph-store/apache-hugegraph-incubating-store-1.3.0 + +pushd $STORE_DIR +. bin/start-hugegraph-store.sh +sleep 10 +popd diff --git a/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java b/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java index 06b8fe28d3..74c630dd97 100644 --- a/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java +++ b/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java @@ -91,6 +91,9 @@ private static void registerBackend(String backend) { case "postgresql": registerPostgresql(); break; + case "hstore": + registerHstore(); + break; default: throw new HugeException("Unsupported backend type '%s'", backend); } @@ -190,6 +193,15 @@ public static void registerPostgresql() { ".PostgresqlStoreProvider"); } + public static void registerHstore() { + // Register config + OptionSpace.register("hstore", + "org.apache.hugegraph.backend.store.hstore.HstoreOptions"); + // Register backend + BackendProviderFactory.register("hstore", + "org.apache.hugegraph.backend.store.hstore.HstoreProvider"); + } + public static void registerServer() { // Register ServerOptions (rest-server) OptionSpace.register("server", "org.apache.hugegraph.config.ServerOptions"); diff --git a/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties b/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties index af68e80823..de7e31bd58 100644 --- 
a/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties +++ b/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties @@ -15,4 +15,4 @@ # limitations under the License. # -backends=[cassandra, scylladb, rocksdb, mysql, palo, hbase, postgresql] +backends=[cassandra, scylladb, rocksdb, mysql, palo, hbase, postgresql, hstore] diff --git a/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java b/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java index aca712a882..1d75c00944 100644 --- a/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java +++ b/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java @@ -96,7 +96,7 @@ protected void registerTableManager(HugeType type, HbaseTable table) { @Override protected final HbaseTable table(HugeType type) { assert type != null; - HbaseTable table = this.tables.get(type); + HbaseTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table type: %s", type); } diff --git a/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java b/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java index d898c2b766..c5d983feff 100644 --- a/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java +++ b/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java @@ -348,7 +348,7 @@ protected Collection tables() { @Override protected final MysqlTable table(HugeType type) { assert type != null; - MysqlTable table = this.tables.get(type); + MysqlTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported 
table type: %s", type); } diff --git a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java index 46315d0189..c34d9632fc 100644 --- a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java +++ b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java @@ -152,7 +152,7 @@ protected void unregisterTableManager(String name) { @Override protected final RocksDBTable table(HugeType type) { - RocksDBTable table = this.tables.get(type); + RocksDBTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table: '%s'", type); } diff --git a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBTable.java b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBTable.java index 129af7ac78..824986d22c 100644 --- a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBTable.java +++ b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBTable.java @@ -42,6 +42,7 @@ import org.apache.hugegraph.backend.store.BackendEntryIterator; import org.apache.hugegraph.backend.store.BackendTable; import org.apache.hugegraph.backend.store.Shard; +import org.apache.hugegraph.backend.store.rocksdb.RocksDBSessions.Session; import org.apache.hugegraph.exception.NotSupportException; import org.apache.hugegraph.iterator.FlatMapperIterator; import org.apache.hugegraph.type.HugeType; @@ -149,6 +150,15 @@ public Iterator query(RocksDBSessions.Session session, Query query return newEntryIterator(this.queryBy(session, query), query); } + @Override + public Iterator queryOlap(Session 
session, Query query) { + if (query.limit() == 0L && !query.noLimit()) { + // LOG.debug("Return empty result(limit=0) for query {}", query); + return Collections.emptyIterator(); + } + return newEntryIteratorOlap(this.queryBy(session, query), query, true); + } + protected BackendColumnIterator queryBy(RocksDBSessions.Session session, Query query) { // Query all if (query.empty()) { @@ -298,6 +308,19 @@ protected static BackendEntryIterator newEntryIterator(BackendColumnIterator col }); } + protected static BackendEntryIterator newEntryIteratorOlap( + BackendColumnIterator cols, Query query, boolean isOlap) { + return new BinaryEntryIterator<>(cols, query, (entry, col) -> { + if (entry == null || !entry.belongToMe(col)) { + HugeType type = query.resultType(); + // NOTE: only support BinaryBackendEntry currently + entry = new BinaryBackendEntry(type, col.name, false, isOlap); + } + entry.columns(col); + return entry; + }); + } + protected static long sizeOfBackendEntry(BackendEntry entry) { return BinaryEntryIterator.sizeOfEntry(entry); } diff --git a/hugegraph-server/hugegraph-test/pom.xml b/hugegraph-server/hugegraph-test/pom.xml index 47e5e5fac0..1bd869b5ad 100644 --- a/hugegraph-server/hugegraph-test/pom.xml +++ b/hugegraph-server/hugegraph-test/pom.xml @@ -382,5 +382,15 @@ postgresql + + hstore + + false + + + hstore + binary + + diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java index 0ff6151e30..e93373c1a3 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java @@ -299,6 +299,9 @@ public void testMetricsBackend() { } } break; + case "hstore": + // TODO(metrics): check metrics info + break; default: Assert.fail("Unexpected backend " + backend); break; diff --git 
a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/BaseCoreTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/BaseCoreTest.java index dea160b080..a042533843 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/BaseCoreTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/BaseCoreTest.java @@ -21,6 +21,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.cache.CacheManager; import org.apache.hugegraph.backend.store.BackendFeatures; import org.apache.hugegraph.dist.RegisterUtil; import org.apache.hugegraph.masterelection.GlobalMasterInfo; @@ -90,6 +91,9 @@ public static void clear() { public void setup() { this.clearData(); this.clearSchema(); + // QUESTION: here we should consider to clear cache + // but with this line of code, many ci will fail + // this.clearCache(); } @After @@ -146,6 +150,11 @@ private void clearSchema() { }); } + private void clearCache() { + CacheManager cacheManager = CacheManager.instance(); + cacheManager.clearCache(); + } + protected void mayCommitTx() { // Commit tx probabilistically for test if (new Random().nextBoolean()) { diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/EdgeCoreTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/EdgeCoreTest.java index b1935488a5..1189356bf2 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/EdgeCoreTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/EdgeCoreTest.java @@ -517,7 +517,7 @@ public void testAddEdgeWithInvalidSortkey() { Assert.assertContains("Zero bytes may not occur in string " + "parameters", e.getCause().getMessage()); }); - } else if (backend.equals("rocksdb") || backend.equals("hbase")) { + } else if (ImmutableSet.of("rocksdb", "hbase", 
"hstore").contains(backend)) { Assert.assertThrows(IllegalArgumentException.class, () -> { james.addEdge("write", book, "time", "2017-5-27\u0000"); graph.tx().commit(); @@ -5195,6 +5195,7 @@ public void testScanEdgeInPaging() { query.scan(String.valueOf(Long.MIN_VALUE), String.valueOf(Long.MAX_VALUE)); } else { + // QUESTION:The query method may not be well adapted query.scan(BackendTable.ShardSplitter.START, BackendTable.ShardSplitter.END); } @@ -5822,7 +5823,7 @@ public void testAddEdgePropertyWithSpecialValueForSecondaryIndex() { String backend = graph.backend(); Set nonZeroBackends = ImmutableSet.of("postgresql", - "rocksdb", "hbase"); + "rocksdb", "hbase", "hstore"); if (nonZeroBackends.contains(backend)) { Assert.assertThrows(Exception.class, () -> { louise.addEdge("strike", sean, "id", 4, diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/MultiGraphsTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/MultiGraphsTest.java index 85c2e33c73..0849567988 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/MultiGraphsTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/MultiGraphsTest.java @@ -92,11 +92,11 @@ public void testCopySchemaWithMultiGraphs() { SchemaManager schema = g1.schema(); - schema.propertyKey("id").asInt().create(); - schema.propertyKey("name").asText().create(); - schema.propertyKey("age").asInt().valueSingle().create(); - schema.propertyKey("city").asText().create(); - schema.propertyKey("weight").asDouble().valueList().create(); + schema.propertyKey("id").asInt().checkExist(false).create(); + schema.propertyKey("name").asText().checkExist(false).create(); + schema.propertyKey("age").asInt().valueSingle().checkExist(false).create(); + schema.propertyKey("city").asText().checkExist(false).create(); + schema.propertyKey("weight").asDouble().valueList().checkExist(false).create(); 
schema.propertyKey("born").asDate().ifNotExist().create(); schema.propertyKey("time").asDate().ifNotExist().create(); @@ -211,8 +211,8 @@ public void testCopySchemaWithMultiGraphsWithConflict() { g1.serverStarted(GlobalMasterInfo.master("server-g1c")); g2.serverStarted(GlobalMasterInfo.master("server-g2c")); - g1.schema().propertyKey("id").asInt().create(); - g2.schema().propertyKey("id").asText().create(); + g1.schema().propertyKey("id").asInt().checkExist(false).create(); + g2.schema().propertyKey("id").asText().checkExist(false).create(); Assert.assertThrows(ExistedException.class, () -> { g2.schema().copyFrom(g1.schema()); diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java index 7d4e8a8917..1a6738a8e3 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java @@ -56,7 +56,7 @@ public void setup() { Iterator> iter = scheduler.tasks(null, -1, null); while (iter.hasNext()) { - scheduler.delete(iter.next().id()); + scheduler.delete(iter.next().id(), false); } } @@ -77,7 +77,7 @@ public void testTask() throws TimeoutException { Assert.assertFalse(task.completed()); Assert.assertThrows(IllegalArgumentException.class, () -> { - scheduler.delete(id); + scheduler.delete(id, false); }, e -> { Assert.assertContains("Can't delete incomplete task '88888'", e.getMessage()); @@ -107,7 +107,7 @@ public void testTask() throws TimeoutException { Assert.assertEquals("test-task", iter.next().name()); Assert.assertFalse(iter.hasNext()); - scheduler.delete(id); + scheduler.delete(id, false); iter = scheduler.tasks(null, 10, null); Assert.assertFalse(iter.hasNext()); Assert.assertThrows(NotFoundException.class, () -> { diff --git 
a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/VertexCoreTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/VertexCoreTest.java index 585734cf40..f009cec855 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/VertexCoreTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/VertexCoreTest.java @@ -76,6 +76,7 @@ import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; +import org.junit.After; import org.junit.Assume; import org.junit.Before; import org.junit.Test; @@ -156,6 +157,12 @@ public void initSchema() { .create(); } + @After + public void resetGraphMode() { + // In OLAP-related tests, if an error occurs midway, the graph mode will not be reset. + graph().readMode(GraphReadMode.OLTP_ONLY); + } + protected void initPersonIndex(boolean indexCity) { SchemaManager schema = graph().schema(); @@ -6247,7 +6254,7 @@ public void testAddVertexPropertyWithSpecialValueForSecondaryIndex() { String backend = graph.backend(); Set nonZeroBackends = ImmutableSet.of("postgresql", - "rocksdb", "hbase"); + "rocksdb", "hbase", "hstore"); if (nonZeroBackends.contains(backend)) { Assert.assertThrows(Exception.class, () -> { graph.addVertex(T.label, "person", "name", "0", @@ -9064,7 +9071,7 @@ public void testQueryBySearchIndexWithSpecialSymbol() { Assert.assertEquals(0, vertices.size()); String backend = graph.backend(); - if (ImmutableSet.of("rocksdb", "hbase").contains(backend)) { + if (ImmutableSet.of("rocksdb", "hbase", "hstore").contains(backend)) { Assert.assertThrows(Exception.class, () -> { graph.addVertex(T.label, "person", "name", "0", "city", "xyz\u0000efg", "age", 0); diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java 
b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java index 034e3a1b84..9ef6d9affd 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java @@ -144,7 +144,7 @@ protected void clearSchema() { TaskScheduler scheduler = this.graph.taskScheduler(); scheduler.tasks(null, -1, null).forEachRemaining(elem -> { - scheduler.delete(elem.id()); + scheduler.delete(elem.id(), false); }); } diff --git a/hugegraph-store/.gitignore b/hugegraph-store/.gitignore index afb9ae8375..52458797e5 100644 --- a/hugegraph-store/.gitignore +++ b/hugegraph-store/.gitignore @@ -1,2 +1,5 @@ # Exclude the generated PB files hg-store-grpc/src/main/java/ + +# Exclude data files +**/storage/ diff --git a/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBOptions.java b/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBOptions.java index f2fab66bc6..9d380b3356 100644 --- a/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBOptions.java +++ b/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBOptions.java @@ -23,6 +23,8 @@ import static org.apache.hugegraph.config.OptionChecker.rangeDouble; import static org.apache.hugegraph.config.OptionChecker.rangeInt; +import java.util.Map; + import org.apache.hugegraph.config.ConfigConvOption; import org.apache.hugegraph.config.ConfigListConvOption; import org.apache.hugegraph.config.ConfigOption; @@ -82,18 +84,20 @@ public class RocksDBOptions extends OptionHolder { "" ); - public static final ConfigOption LOG_LEVEL = + public static final ConfigOption LOG_LEVEL = new ConfigOption<>( "rocksdb.log_level", "The info log level of RocksDB.", - allowValues(InfoLogLevel.DEBUG_LEVEL, - InfoLogLevel.INFO_LEVEL, - 
InfoLogLevel.WARN_LEVEL, - InfoLogLevel.ERROR_LEVEL, - InfoLogLevel.FATAL_LEVEL, - InfoLogLevel.HEADER_LEVEL), - InfoLogLevel.INFO_LEVEL - ); + allowValues("DEBUG", "INFO", "WARN", "ERROR", "FATAL", "HEADER"), + "INFO" + ); + public static final Map LOG_LEVEL_MAPPING = + Map.of("DEBUG", InfoLogLevel.DEBUG_LEVEL, + "INFO", InfoLogLevel.INFO_LEVEL, + "WARN", InfoLogLevel.WARN_LEVEL, + "ERROR", InfoLogLevel.ERROR_LEVEL, + "FATAL", InfoLogLevel.FATAL_LEVEL, + "HEADER", InfoLogLevel.HEADER_LEVEL); public static final ConfigOption NUM_LEVELS = new ConfigOption<>( diff --git a/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBSession.java b/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBSession.java index fe27183f18..70299f8f09 100644 --- a/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBSession.java +++ b/hugegraph-store/hg-store-rocksdb/src/main/java/org/apache/hugegraph/rocksdb/access/RocksDBSession.java @@ -143,7 +143,8 @@ public static void initOptions(HugeConfig conf, db.setAllowConcurrentMemtableWrite(true); db.setEnableWriteThreadAdaptiveYield(true); } - db.setInfoLogLevel(conf.get(RocksDBOptions.LOG_LEVEL)); + db.setInfoLogLevel( + RocksDBOptions.LOG_LEVEL_MAPPING.get(conf.get(RocksDBOptions.LOG_LEVEL))); db.setMaxSubcompactions(conf.get(RocksDBOptions.MAX_SUB_COMPACTIONS)); db.setAllowMmapWrites(conf.get(RocksDBOptions.ALLOW_MMAP_WRITES)); db.setAllowMmapReads(conf.get(RocksDBOptions.ALLOW_MMAP_READS));