From ed9e4c6449b9a1e8a003092776afdb5dcefd6036 Mon Sep 17 00:00:00 2001
From: haerdib <73821294+haerdib@users.noreply.github.com>
Date: Mon, 9 May 2022 11:45:55 +0200
Subject: [PATCH] Upstream merge and fix unit tests (#40)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Add a local-setup config to the tutorial (integritee book) (#666)
* Update call status to `InSidechainBlock` only after block import (#676)
* rename on_block_created to on_block_imported
* update tx status only after block import
* fix clippy
* fix cargo test
* remove waiting time from demos
* re-add removed comments
* update comments
* extract call removal logic from on_block_imported
* Bump substrate to commit f5f286db0da9... (#669)
* Bump substrate
Bump substrate to commit 8df8d908c4d77a8dd19751784b6aca62159ddda8
Remove dependencies on scale-info 1.0.0 and parity-scale-codec 2.3.1
Upgrade substrate to commit f5f286d...
substrate fix for: sp-core's full_crypto feature flag breaks no_std builds
* frame-metadata from crates.io
* Update CI to use updated node
Co-authored-by: echevrier
* Dockerize (#668)
* Dockerize the binaries #579
* Add tags to run as well #579
* Fix running binary in docker #579
* Add more files to docker #579
* Add conditional runs
* Add +x earlier
* Rename docker-service to integritee-demo-validateer. Fixes #579
* Lift clap to version 3.1.6 and move stf cli to cli crate. (#679)
* Lift clap to version 3.1.6 and move stf cli to cli crate.
* Adapt demo script parameters
* Add cli examples to README.md
* update comment descriptions
Co-authored-by: Gaudenz Kessler
* Refactor global components and initialization process (#677)
* Consistently (re-)use the component container
* RPC server is initialized before registering on the parentchain
* Fix issue with sidechain block import when latest parentchain block is already too new
Closes #545 #600 #684 #683
* create alive service to determine that the service is up, running and registered (#697)
* create alive service
* replace alive with initialized
Co-authored-by: Gaudenz Kessler
* Add port config for the untrusted http server for `is_initialized` (#700)
* rename wrongly named `signing_key` function to `state_key` (#704)
* Move top pool and top pool author crates to core primitives (#705)
* Introduce state snapshot history (#698)
Closes #688
* Add header to sidechain block (#699)
Closes #686
Co-authored-by: Gaudenz Kessler
* introduce layer of indirection for sidechainblock (#716)
Closes #710
Co-authored-by: Gaudenz Kessler
Co-authored-by: Felix Müller
* Persist web-socket connections (#718)
Complete overhaul of the trusted web-socket server:
* using MIO to serve concurrent connections
* Server keeps connections open until a client requests a close
* Changed our clients to match this pattern
* Upgrade to polkadot v0.9.19 (#720)
* Bump substrate to polkadot-v0.9.19
Bump RUNTIME_SPEC_VERSION to 9
Set substrate-api-client to polkadot-v0.9.19
Set integritee-node to polkadot-v0.9.19
Set integritee-node to master
* Cargo update + rebase
* Update github actions
Co-authored-by: echevrier
* fix some cargo.tomls
* fix cargo.lock
* remove game engine
* update teerex module
* fix itp-registry-storage
* cargo update
* reinsert game engine
* some further clean up
* update teaclave
* add ajuna commands
* cargo update
* cargo update
* update ajuna cli
* fix some things
* Signed sidechain block
* fix shard_id getter
* fix load_initialized
* fix sgx externalities import
* fix compilation issues
* remove patches and cargo update
* update demo docu
* add bin folder to docu
* remove hard coded ports. Not necessary
* add ./
* update pallet versions
* make tests compile again
* fix import in sgx mode
* merge from upstream number two: remove itp storage verifier
* cargo fmt
* fix tests
* update doc once again
* fix script and queue game
* cargo update + some code clean up

Co-authored-by: Felix Müller
Co-authored-by: echevrier <84318241+echevrier@users.noreply.github.com>
Co-authored-by: echevrier
Co-authored-by: mosonyi
Co-authored-by: gaudenzkessler <92718752+gaudenzkessler@users.noreply.github.com>
Co-authored-by: Gaudenz Kessler
Co-authored-by: Felix Müller
---
 .github/workflows/build_and_test.yml | 79 +-
 .github/workflows/delete-release.yml | 65 +
 .github/workflows/publish-docker-release.yml | 64 +
 Cargo.lock | 2324 +++++++++--
 Cargo.toml | 17 +-
 Dockerfile | 23 +
 app-libs/stf/Cargo.toml | 37 +-
 app-libs/stf/src/cli.rs | 514 ----
 app-libs/stf/src/helpers.rs | 10 +-
 app-libs/stf/src/lib.rs | 14 +-
 app-libs/stf/src/stf_sgx_primitives.rs | 14 +-
 cli/Cargo.toml | 27 +-
 cli/README.md | 21 +-
 cli/demo_connect_four.sh | 53 +-
 cli/demo_connect_four_two_workers.sh | 53 +-
 cli/demo_direct_call.sh | 14 +-
 cli/demo_private_tx.sh | 12 +-
 cli/demo_shielding_unshielding.sh | 14 +-
 cli/demo_sidechain.sh | 26 +-
 cli/src/command_utils.rs | 91 +
 cli/src/commands.rs | 392 +++
 cli/src/main.rs | 924 +------
 cli/src/trusted_command_utils.rs | 87 +
 cli/src/trusted_commands.rs | 316 +++
 cli/src/trusted_operation.rs | 255 ++
 core-primitives/block-import-queue/Cargo.toml | 2 +-
 .../component-container/Cargo.toml | 11 +-
 .../src/component_container.rs | 38 +-
 .../component-container/src/error.rs | 25 +-
 .../component-container/src/lib.rs | 7 +
 core-primitives/enclave-api/Cargo.toml | 10 +-
 core-primitives/enclave-api/ffi/src/lib.rs | 12 +
 .../enclave-api/src/enclave_base.rs | 29 +
 core-primitives/enclave-metrics/Cargo.toml | 2 +-
 core-primitives/extrinsics-factory/Cargo.toml | 8 +-
 .../node-api-extensions/Cargo.toml | 10 +-
 core-primitives/ocall-api/Cargo.toml | 17 +-
 core-primitives/ocall-api/src/lib.rs | 27 +-
 core-primitives/primitives-cache/src/lib.rs | 20 +-
 .../primitives-cache/src/primitives_cache.rs | 8 +-
 core-primitives/registry-storage/Cargo.toml | 8 +-
 core-primitives/settings/src/lib.rs | 8 +-
 core-primitives/sgx/crypto/Cargo.toml | 4 +-
 core-primitives/sgx/crypto/src/aes.rs | 25 +-
 core-primitives/sgx/crypto/src/ed25519.rs | 21 +-
 core-primitives/sgx/crypto/src/lib.rs | 2 +-
 core-primitives/sgx/crypto/src/mocks.rs | 61 +-
 core-primitives/sgx/crypto/src/rsa3072.rs | 25 +-
 core-primitives/sgx/io/src/lib.rs | 34 +-
 core-primitives/stf-executor/Cargo.toml | 11 +-
 core-primitives/stf-executor/src/error.rs | 10 +-
 core-primitives/stf-executor/src/executor.rs | 51 +-
 .../stf-executor/src/executor_tests.rs | 27 +-
 core-primitives/stf-state-handler/Cargo.toml | 10 +-
 .../stf-state-handler/src/error.rs | 14 +-
 .../stf-state-handler/src/file_io.rs | 455 +++-
 .../src/global_file_state_handler.rs | 83 -
 .../stf-state-handler/src/handle_state.rs | 32 +-
 .../src/in_memory_state_file_io.rs | 380 +++
 core-primitives/stf-state-handler/src/lib.rs | 23 +-
 .../src/query_shard_state.rs | 2 +-
 .../stf-state-handler/src/state_handler.rs | 180 ++
 .../src/state_key_repository.rs | 96 +
 .../src/state_snapshot_primitives.rs | 55 +
 .../src/state_snapshot_repository.rs | 443 ++++
 .../src/state_snapshot_repository_loader.rs | 202 ++
 .../stf-state-handler/src/test/mocks/mod.rs | 19 +
 .../test/mocks/state_key_repository_mock.rs | 68 +
.../test/mocks/versioned_state_access_mock.rs | 100 + .../stf-state-handler/src/test/mod.rs | 25 + .../stf-state-handler/src/test/sgx_tests.rs | 340 +++ .../stf-state-handler/src/tests.rs | 175 -- core-primitives/storage-verified/Cargo.toml | 45 - core-primitives/storage-verified/src/lib.rs | 120 - core-primitives/storage/Cargo.toml | 16 +- core-primitives/storage/src/keys.rs | 2 +- core-primitives/storage/src/lib.rs | 2 +- core-primitives/teerex-storage/Cargo.toml | 2 +- core-primitives/test/Cargo.toml | 14 +- .../test/src/mock/handle_state_mock.rs | 79 +- core-primitives/test/src/mock/mod.rs | 2 - .../test/src/mock/ocall_api_mock.rs | 164 -- core-primitives/test/src/mock/onchain_mock.rs | 100 +- core-primitives/time-utils/src/lib.rs | 5 + .../top-pool-author}/Cargo.toml | 31 +- .../top-pool-author}/src/api.rs | 4 +- .../top-pool-author}/src/author.rs | 25 +- .../top-pool-author}/src/author_tests.rs | 6 +- .../top-pool-author}/src/client_error.rs | 4 +- .../top-pool-author}/src/error.rs | 2 +- .../top-pool-author}/src/lib.rs | 2 - .../top-pool-author}/src/test_utils.rs | 4 +- .../top-pool-author}/src/top_filter.rs | 0 .../top-pool-author}/src/traits.rs | 8 +- .../top-pool/Cargo.toml | 13 +- .../top-pool/src/base_pool.rs | 0 .../top-pool/src/basic_pool.rs | 4 +- .../top-pool/src/error.rs | 0 .../top-pool/src/future.rs | 0 .../top-pool/src/lib.rs | 3 + .../top-pool/src/listener.rs | 0 core-primitives/top-pool/src/mocks/mod.rs | 22 + .../top-pool/src/mocks/rpc_responder_mock.rs | 0 .../src/mocks}/trusted_operation_pool_mock.rs | 12 +- .../top-pool/src/pool.rs | 0 .../top-pool/src/primitives.rs | 2 +- .../top-pool/src/ready.rs | 0 .../top-pool/src/rotator.rs | 0 .../top-pool/src/tracked_map.rs | 0 .../top-pool/src/validated_pool.rs | 2 +- .../top-pool/src/watcher.rs | 0 core-primitives/types/Cargo.toml | 16 +- core/direct-rpc-server/Cargo.toml | 4 +- core/direct-rpc-server/src/lib.rs | 25 +- .../src/mocks/connection_mock.rs | 108 - core/direct-rpc-server/src/mocks/mod.rs | 3 +- ...dates_sink.rs => response_channel_mock.rs} | 41 +- .../direct-rpc-server/src/response_channel.rs | 26 + .../src/rpc_connection_registry.rs | 48 +- core/direct-rpc-server/src/rpc_responder.rs | 152 +- core/direct-rpc-server/src/rpc_ws_handler.rs | 96 +- .../block-import-dispatcher/Cargo.toml | 2 +- .../src/triggered_dispatcher.rs | 7 + core/parentchain/block-importer/Cargo.toml | 11 +- .../block-importer/src/block_importer.rs | 41 +- core/parentchain/block-importer/src/error.rs | 4 +- .../indirect-calls-executor/Cargo.toml | 8 +- .../src/indirect_calls_executor.rs | 2 +- core/parentchain/light-client/Cargo.toml | 16 +- .../light-client/src/concurrent_access.rs | 16 +- core/parentchain/light-client/src/io.rs | 12 +- core/parentchain/light-client/src/lib.rs | 7 +- .../src/mocks/validator_mock_seal.rs | 8 +- core/rpc-client/Cargo.toml | 10 +- core/rpc-client/src/direct_client.rs | 115 +- core/rpc-client/src/mock.rs | 4 + core/rpc-client/src/ws_client.rs | 113 +- core/rpc-server/Cargo.toml | 4 +- core/tls-websocket-server/Cargo.toml | 33 +- .../src/config_provider.rs | 45 + core/tls-websocket-server/src/connection.rs | 308 ++- .../src/connection_id_generator.rs | 76 + core/tls-websocket-server/src/error.rs | 55 + core/tls-websocket-server/src/lib.rs | 172 +- core/tls-websocket-server/src/stream_state.rs | 104 + .../src/test/fixtures/mod.rs | 22 + .../src/test/fixtures/no_cert_verifier.rs | 51 + .../src/test/fixtures/test_cert.rs | 139 + .../src/test/fixtures/test_private_key.rs | 53 + .../src/test/fixtures/test_server.rs 
| 41 + .../fixtures/test_server_config_provider.rs | 43 + .../src/test/mocks/mod.rs | 19 + .../test/mocks/web_socket_connection_mock.rs | 103 + .../src/test/mocks/web_socket_handler_mock.rs | 61 + .../tls-websocket-server/src/test}/mod.rs | 3 +- .../src/{common.rs => tls_common.rs} | 8 +- core/tls-websocket-server/src/ws_server.rs | 427 ++- enclave-runtime/Cargo.lock | 1194 ++++----- enclave-runtime/Cargo.toml | 43 +- enclave-runtime/Enclave.edl | 6 + enclave-runtime/src/attestation.rs | 6 +- enclave-runtime/src/error.rs | 6 +- enclave-runtime/src/global_components.rs | 129 +- enclave-runtime/src/initialization.rs | 308 +++ enclave-runtime/src/lib.rs | 289 +- enclave-runtime/src/ocall/metrics_ocall.rs | 2 +- enclave-runtime/src/ocall/on_chain_ocall.rs | 38 +- enclave-runtime/src/rpc/mod.rs | 1 + .../src/rpc/rpc_response_channel.rs | 40 + enclave-runtime/src/rpc/worker_api_direct.rs | 6 +- enclave-runtime/src/test/cert_tests.rs | 7 +- .../test/fixtures/initialize_test_state.rs | 3 +- .../test/mocks/propose_to_import_call_mock.rs | 23 +- enclave-runtime/src/test/mocks/types.rs | 26 +- .../src/test/sidechain_aura_tests.rs | 48 +- enclave-runtime/src/tests.rs | 147 +- enclave-runtime/src/tls_ra/mocks.rs | 14 +- enclave-runtime/src/tls_ra/mod.rs | 4 +- enclave-runtime/src/tls_ra/seal_handler.rs | 100 +- enclave-runtime/src/tls_ra/tests.rs | 10 +- enclave-runtime/src/tls_ra/tls_ra_client.rs | 27 +- enclave-runtime/src/tls_ra/tls_ra_server.rs | 30 +- enclave-runtime/src/top_pool_execution.rs | 91 +- local-setup/github-action-config.json | 8 +- local-setup/simple-config.json | 8 +- local-setup/tutorial-config.json | 52 + rust-sgx-sdk/edl/intel/sgx_ttls.edl | 62 + rust-sgx-sdk/version | 2 +- service/Cargo.toml | 23 +- service/src/cli.yml | 8 +- service/src/config.rs | 18 + service/src/error.rs | 21 +- service/src/globals/mod.rs | 1 - service/src/globals/worker.rs | 70 - service/src/initialized_service.rs | 54 + service/src/main.rs | 269 +- service/src/ocall_bridge/sidechain_ocall.rs | 9 +- service/src/sync_block_gossiper.rs | 28 +- service/src/sync_state.rs | 6 +- service/src/tests/commons.rs | 1 + service/src/tests/ecalls.rs | 7 +- service/src/tests/mock.rs | 17 + service/src/tests/mocks/enclave_api_mock.rs | 8 + service/src/tests/mod.rs | 3 - service/src/tests/worker.rs | 36 - service/src/worker.rs | 83 +- service/src/worker_peers_updater.rs | 34 +- sidechain/block-composer/Cargo.toml | 12 +- .../block-composer/src/block_composer.rs | 58 +- sidechain/block-composer/src/error.rs | 2 +- sidechain/consensus/aura/Cargo.toml | 21 +- .../consensus/aura/src/block_importer.rs | 82 +- sidechain/consensus/aura/src/lib.rs | 15 +- .../consensus/aura/src/proposer_factory.rs | 8 +- sidechain/consensus/aura/src/slot_proposer.rs | 8 +- .../aura/src/test/block_importer_tests.rs | 56 +- .../consensus/aura/src/test/fixtures/types.rs | 7 +- .../aura/src/test/mocks/proposer_mock.rs | 14 +- sidechain/consensus/aura/src/verifier.rs | 230 +- sidechain/consensus/common/Cargo.toml | 8 +- .../consensus/common/src/block_import.rs | 22 +- .../common/src/block_import_queue_worker.rs | 5 +- sidechain/consensus/common/src/lib.rs | 2 +- .../consensus/common/src/peer_block_sync.rs | 17 +- .../common/src/test/mocks/verifier_mock.rs | 2 +- sidechain/consensus/slots/Cargo.toml | 9 +- sidechain/consensus/slots/src/lib.rs | 5 +- sidechain/consensus/slots/src/slots.rs | 52 +- sidechain/primitives/Cargo.toml | 8 +- sidechain/primitives/src/traits/mod.rs | 70 +- sidechain/primitives/src/types/block.rs | 116 +- 
sidechain/primitives/src/types/block_data.rs | 82 + sidechain/primitives/src/types/header.rs | 67 + sidechain/primitives/src/types/mod.rs | 2 + sidechain/rpc-handler/Cargo.toml | 10 +- .../rpc-handler/src/direct_top_pool_api.rs | 10 +- sidechain/rpc-handler/src/import_block_api.rs | 2 +- sidechain/sidechain-crate/Cargo.toml | 7 - sidechain/sidechain-crate/src/lib.rs | 4 - sidechain/state/Cargo.toml | 10 +- sidechain/storage/Cargo.toml | 5 +- sidechain/storage/src/storage.rs | 71 +- sidechain/storage/src/test_utils.rs | 51 +- sidechain/test/Cargo.toml | 7 +- sidechain/test/src/lib.rs | 2 + sidechain/test/src/sidechain_block_builder.rs | 95 +- .../test/src/sidechain_block_data_builder.rs | 97 + .../test/src/sidechain_header_builder.rs | 80 + sidechain/top-pool-executor/Cargo.toml | 13 +- .../top-pool-executor/src/call_operator.rs | 37 +- .../src/call_operator_mock.rs | 16 +- sidechain/top-pool-executor/src/error.rs | 2 +- .../top-pool-executor/src/getter_operator.rs | 15 +- sidechain/top-pool-executor/src/lib.rs | 26 +- .../top-pool-rpc-author/src/initializer.rs | 55 - sidechain/validateer-fetch/Cargo.toml | 12 +- sidechain/validateer-fetch/src/error.rs | 2 +- sidechain/validateer-fetch/src/validateer.rs | 4 +- 258 files changed, 10670 insertions(+), 6387 deletions(-) create mode 100644 .github/workflows/delete-release.yml create mode 100644 .github/workflows/publish-docker-release.yml create mode 100644 Dockerfile delete mode 100644 app-libs/stf/src/cli.rs mode change 100644 => 100755 cli/demo_connect_four.sh create mode 100644 cli/src/command_utils.rs create mode 100644 cli/src/commands.rs create mode 100644 cli/src/trusted_command_utils.rs create mode 100644 cli/src/trusted_commands.rs create mode 100644 cli/src/trusted_operation.rs rename sidechain/top-pool-rpc-author/src/pool_types.rs => core-primitives/component-container/src/error.rs (51%) delete mode 100644 core-primitives/stf-state-handler/src/global_file_state_handler.rs create mode 100644 core-primitives/stf-state-handler/src/in_memory_state_file_io.rs create mode 100644 core-primitives/stf-state-handler/src/state_handler.rs create mode 100644 core-primitives/stf-state-handler/src/state_key_repository.rs create mode 100644 core-primitives/stf-state-handler/src/state_snapshot_primitives.rs create mode 100644 core-primitives/stf-state-handler/src/state_snapshot_repository.rs create mode 100644 core-primitives/stf-state-handler/src/state_snapshot_repository_loader.rs create mode 100644 core-primitives/stf-state-handler/src/test/mocks/mod.rs create mode 100644 core-primitives/stf-state-handler/src/test/mocks/state_key_repository_mock.rs create mode 100644 core-primitives/stf-state-handler/src/test/mocks/versioned_state_access_mock.rs create mode 100644 core-primitives/stf-state-handler/src/test/mod.rs create mode 100644 core-primitives/stf-state-handler/src/test/sgx_tests.rs delete mode 100644 core-primitives/stf-state-handler/src/tests.rs delete mode 100644 core-primitives/storage-verified/Cargo.toml delete mode 100644 core-primitives/storage-verified/src/lib.rs delete mode 100644 core-primitives/test/src/mock/ocall_api_mock.rs rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/Cargo.toml (65%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/api.rs (99%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/author.rs (94%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/author_tests.rs (96%) rename {sidechain/top-pool-rpc-author 
=> core-primitives/top-pool-author}/src/client_error.rs (98%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/error.rs (97%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/lib.rs (97%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/test_utils.rs (95%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/top_filter.rs (100%) rename {sidechain/top-pool-rpc-author => core-primitives/top-pool-author}/src/traits.rs (93%) rename {sidechain => core-primitives}/top-pool/Cargo.toml (84%) rename {sidechain => core-primitives}/top-pool/src/base_pool.rs (100%) rename {sidechain => core-primitives}/top-pool/src/basic_pool.rs (97%) rename {sidechain => core-primitives}/top-pool/src/error.rs (100%) rename {sidechain => core-primitives}/top-pool/src/future.rs (100%) rename {sidechain => core-primitives}/top-pool/src/lib.rs (97%) rename {sidechain => core-primitives}/top-pool/src/listener.rs (100%) create mode 100644 core-primitives/top-pool/src/mocks/mod.rs rename {sidechain => core-primitives}/top-pool/src/mocks/rpc_responder_mock.rs (100%) rename core-primitives/{test/src/mock => top-pool/src/mocks}/trusted_operation_pool_mock.rs (96%) rename {sidechain => core-primitives}/top-pool/src/pool.rs (100%) rename {sidechain => core-primitives}/top-pool/src/primitives.rs (99%) rename {sidechain => core-primitives}/top-pool/src/ready.rs (100%) rename {sidechain => core-primitives}/top-pool/src/rotator.rs (100%) rename {sidechain => core-primitives}/top-pool/src/tracked_map.rs (100%) rename {sidechain => core-primitives}/top-pool/src/validated_pool.rs (99%) rename {sidechain => core-primitives}/top-pool/src/watcher.rs (100%) delete mode 100644 core/direct-rpc-server/src/mocks/connection_mock.rs rename core/direct-rpc-server/src/mocks/{updates_sink.rs => response_channel_mock.rs} (51%) create mode 100644 core/direct-rpc-server/src/response_channel.rs create mode 100644 core/tls-websocket-server/src/config_provider.rs create mode 100644 core/tls-websocket-server/src/connection_id_generator.rs create mode 100644 core/tls-websocket-server/src/error.rs create mode 100644 core/tls-websocket-server/src/stream_state.rs create mode 100644 core/tls-websocket-server/src/test/fixtures/mod.rs create mode 100644 core/tls-websocket-server/src/test/fixtures/no_cert_verifier.rs create mode 100644 core/tls-websocket-server/src/test/fixtures/test_cert.rs create mode 100644 core/tls-websocket-server/src/test/fixtures/test_private_key.rs create mode 100644 core/tls-websocket-server/src/test/fixtures/test_server.rs create mode 100644 core/tls-websocket-server/src/test/fixtures/test_server_config_provider.rs create mode 100644 core/tls-websocket-server/src/test/mocks/mod.rs create mode 100644 core/tls-websocket-server/src/test/mocks/web_socket_connection_mock.rs create mode 100644 core/tls-websocket-server/src/test/mocks/web_socket_handler_mock.rs rename {sidechain/top-pool/src/mocks => core/tls-websocket-server/src/test}/mod.rs (94%) rename core/tls-websocket-server/src/{common.rs => tls_common.rs} (98%) create mode 100644 enclave-runtime/src/initialization.rs create mode 100644 enclave-runtime/src/rpc/rpc_response_channel.rs create mode 100644 local-setup/tutorial-config.json create mode 100644 rust-sgx-sdk/edl/intel/sgx_ttls.edl delete mode 100644 service/src/globals/worker.rs create mode 100644 service/src/initialized_service.rs delete mode 100644 service/src/tests/worker.rs create mode 100644 
sidechain/primitives/src/types/block_data.rs create mode 100644 sidechain/primitives/src/types/header.rs create mode 100644 sidechain/test/src/sidechain_block_data_builder.rs create mode 100644 sidechain/test/src/sidechain_header_builder.rs delete mode 100644 sidechain/top-pool-rpc-author/src/initializer.rs diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 875f61ec44..1c212dd0f8 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -4,6 +4,8 @@ on: workflow_dispatch: push: branches: [ master ] + tags: + - '[0-9]+.[0-9]+.[0-9]+' pull_request: branches: [ master ] @@ -25,7 +27,7 @@ jobs: build-test: runs-on: ubuntu-latest - container: "integritee/integritee-dev:0.1.7" + container: "integritee/integritee-dev:0.1.9" steps: - uses: actions/checkout@v2 - name: init rust @@ -60,7 +62,7 @@ jobs: clippy: runs-on: ubuntu-latest - container: "integritee/integritee-dev:0.1.7" + container: "integritee/integritee-dev:0.1.9" steps: - uses: actions/checkout@v2 - name: init rust @@ -95,7 +97,7 @@ jobs: integration-tests: runs-on: ubuntu-latest needs: build-test - container: "integritee/integritee-dev:0.1.7" + container: "integritee/integritee-dev:0.1.9" strategy: fail-fast: false matrix: @@ -172,10 +174,10 @@ jobs: with: github_token: ${{secrets.GITHUB_TOKEN}} workflow: ci.yml - name: integritee-node-dev-fb4bac9880a8e742f8862bcdadd0f0b0bd2624a7 + name: integritee-node-dev-5da191f98425a3217df413e89126e8c6f7efcb8a # in fact this action should download the latest artifact, but sometimes fails. Then we need to # set the `run_id` to force a download of an updated binary. - run_id: 1807673902 + run_id: 2239021033 path: node repo: integritee-network/integritee-node @@ -207,8 +209,8 @@ jobs: # * `set -eo pipefail` is needed to return an error even if piped to `tee`. 
shell: bash --noprofile --norc -eo pipefail {0} run: | - touch ${{ env.LOG_DIR}}/local-setup.log - ./local-setup/launch.py local-setup/github-action-config.json 2>&1 | tee ${{ env.LOG_DIR}}/local-setup.log & + touch ${{ env.LOG_DIR }}/local-setup.log + ./local-setup/launch.py local-setup/github-action-config.json 2>&1 | tee -i ${{ env.LOG_DIR }}/local-setup.log & sleep 150 - name: ${{ matrix.demo_name }} @@ -225,3 +227,66 @@ jobs: with: name: ${{ matrix.test }}_logs path: ${{ env.LOG_DIR }} + + release: + name: Draft Release + if: startsWith(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + needs: [build-test, integration-tests] + outputs: + release_url: ${{ steps.create-release.outputs.html_url }} + asset_upload_url: ${{ steps.create-release.outputs.upload_url }} + steps: + - uses: actions/checkout@v2 + + - name: Download Integritee Service + uses: actions/download-artifact@v2 + with: + name: integritee-worker-${{ github.sha }} + path: integritee-worker-tmp + + - name: Download Integritee Client + uses: actions/download-artifact@v2 + with: + name: integritee-client-${{ github.sha }} + path: integritee-client-tmp + + - name: Download Enclave Signed + uses: actions/download-artifact@v2 + with: + name: enclave-signed-${{ github.sha }} + path: enclave-signed-tmp + + - name: Move service binaries + run: mv integritee-worker-tmp/integritee-service ./integritee-demo-validateer + + - name: Move service client binaries + run: mv integritee-client-tmp/integritee-cli ./integritee-client + + - name: Move service client binaries + run: mv enclave-signed-tmp/enclave.signed.so ./enclave.signed.so + + - name: Create required package.json + run: test -f package.json || echo '{}' >package.json + + - name: Changelog + uses: scottbrenner/generate-changelog-action@master + id: Changelog + + - name: Display structure of downloaded files + run: ls -R + working-directory: . 
+ + - name: Release + id: create-release + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + body: | + ${{ steps.Changelog.outputs.changelog }} + draft: true + files: | + integritee-client + integritee-demo-validateer + enclave.signed.so \ No newline at end of file diff --git a/.github/workflows/delete-release.yml b/.github/workflows/delete-release.yml new file mode 100644 index 0000000000..a33ab52a9d --- /dev/null +++ b/.github/workflows/delete-release.yml @@ -0,0 +1,65 @@ +name: Delete-Release + +on: + release: + types: [deleted] # should be deleted + +jobs: + purge-image: + name: Delete image from ghcr.io + runs-on: ubuntu-latest + strategy: + matrix: + binary: ["integritee-client", "integritee-demo-validateer"] + steps: + - uses: actions/checkout@v2 + + - name: Set output + id: vars + run: echo ::set-output name=tag::${GITHUB_REF#refs/*/} + + - name: Check output + env: + RELEASE_VERSION: ${{ steps.vars.outputs.tag }} + run: | + echo $RELEASE_VERSION + echo ${{ steps.vars.outputs.tag }} + echo ${{github.event.pull_request.number}} + + - name: Login to DockerHub + if: github.event_name != 'pull_request' + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + # Unfortunately accessing the repo with personal access token is not possible + # Workaround: disable 2FA and user password instead of TOKEN + - name: Delete docker tag + run: | + ORGANIZATION="integritee" + IMAGE="${{ matrix.binary }}" + TAG="${{ steps.vars.outputs.tag }}" + + login_data() { + cat < ~/mrenclave.b58; fi + +# checks +RUN ldd /usr/local/bin/integritee && \ + /usr/local/bin/integritee --version + +ENTRYPOINT ["/usr/local/bin/integritee"] \ No newline at end of file diff --git a/app-libs/stf/Cargo.toml b/app-libs/stf/Cargo.toml index ff4b7623f5..16b6b3a2d1 100644 --- a/app-libs/stf/Cargo.toml +++ b/app-libs/stf/Cargo.toml @@ -17,8 +17,6 @@ sgx = [ std = [ # crates.io "base58", - "clap", - "clap-nested", "codec/std", "hex", "log/std", @@ -48,9 +46,7 @@ test = [] [dependencies] # crates.io base58 = { version = "0.1", optional = true } -clap = { version = "2.33", optional = true } -clap-nested = { version = "0.3.1", optional = true } -codec = { version = "2.0.0", default-features = false, features = ["derive"], package = "parity-scale-codec" } +codec = { version = "3.0.0", default-features = false, features = ["derive"], package = "parity-scale-codec" } derive_more = { version = "0.99.5" } hex = { version = "0.4.2", optional = true } log = { version = "0.4", default-features = false } @@ -67,29 +63,22 @@ its-primitives = { default-features = false, path = "../../sidechain/primitives" its-state = { default-features = false, optional = true, path = "../../sidechain/state" } # Substrate dependencies -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master", features = ["full_crypto"] } -balances = { version = "4.0.0-dev", package = 'pallet-balances', default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -system = { version = "4.0.0-dev", package = "frame-system", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -support = { version = "4.0.0-dev", package = "frame-support", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-application-crypto = { version = "5.0.0", default-features 
= false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sc-keystore = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master", optional = true } +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19", features = ["full_crypto"] } +balances = { version = "4.0.0-dev", package = 'pallet-balances', default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +system = { version = "4.0.0-dev", package = "frame-system", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +support = { version = "4.0.0-dev", package = "frame-support", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-application-crypto = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sc-keystore = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19", optional = true } # scs / integritee / ajuna -my-node-runtime = { package = "ajuna-runtime", git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5", optional = true } +my-node-runtime = { package = "ajuna-solo-runtime", git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup", optional = true } +pallet-ajuna-connectfour = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master" } sgx-runtime = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", optional = true } sp-io = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", features = ["disable_oom", "disable_panic_handler", "disable_allocator"], optional = true } -substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "master", optional = true } -substrate-client-keystore = { git = "https://github.com/scs/substrate-api-client", branch = "master", optional = true } -pallet-ajuna-connectfour = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } - - +substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19", optional = true } +substrate-client-keystore = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19", optional = true } [dev-dependencies] -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } - -#[patch."https://github.com/ajuna-network/sgx-runtime"] -#sgx-runtime = { path = "../../../ajuna-sgx-runtime/runtime" } -#sp-io = { path = "../../../ajuna-sgx-runtime/substrate-sgx/sp-io" } -#sgx-externalities = { path = "../../../ajuna-sgx-runtime/substrate-sgx/externalities" } +sp-keyring = { version = "6.0.0", 
git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } diff --git a/app-libs/stf/src/cli.rs b/app-libs/stf/src/cli.rs deleted file mode 100644 index dcaec847d8..0000000000 --- a/app-libs/stf/src/cli.rs +++ /dev/null @@ -1,514 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -*/ - -use crate::{ - AccountId, Index, KeyPair, ShardIdentifier, TrustedCall, TrustedGetter, TrustedOperation, -}; -use base58::{FromBase58, ToBase58}; -use clap::{AppSettings, Arg, ArgMatches}; -use clap_nested::{Command, Commander, MultiCommand}; -use codec::{Decode, Encode}; -use log::*; -use sp_application_crypto::{ed25519, sr25519}; -use sp_core::{crypto::Ss58Codec, sr25519 as sr25519_core, Pair}; -use sp_runtime::traits::IdentifyAccount; -use std::path::PathBuf; -use substrate_client_keystore::{KeystoreExt, LocalKeystore}; - -const VERSION: &str = env!("CARGO_PKG_VERSION"); -const KEYSTORE_PATH: &str = "my_trusted_keystore"; - -pub fn cmd<'a>( - perform_operation: &'a dyn Fn(&ArgMatches<'_>, &TrustedOperation) -> Option>, -) -> MultiCommand<'a, str, str> { - macro_rules! get_layer_two_nonce { - ($signer_pair:ident, $matches:ident ) => {{ - let top: TrustedOperation = - TrustedGetter::nonce(sr25519_core::Public::from($signer_pair.public()).into()) - .sign(&KeyPair::Sr25519($signer_pair.clone())) - .into(); - let res = perform_operation($matches, &top); - let nonce: Index = if let Some(n) = res { - if let Ok(nonce) = Index::decode(&mut n.as_slice()) { - nonce - } else { - 0 - } - } else { - 0 - }; - debug!("got layer two nonce: {:?}", nonce); - nonce - }}; - } - Commander::new() - .options(|app| { - app.setting(AppSettings::ColoredHelp) - .arg( - Arg::with_name("mrenclave") - .short("m") - .long("mrenclave") - .global(true) - .takes_value(true) - .value_name("STRING") - .help("targeted worker MRENCLAVE"), - ) - .arg( - Arg::with_name("shard") - .short("s") - .long("shard") - .global(true) - .takes_value(true) - .value_name("STRING") - .help("shard identifier"), - ) - .arg( - Arg::with_name("xt-signer") - .short("a") - .long("xt-signer") - .global(true) - .takes_value(true) - .value_name("AccountId") - .default_value("//Alice") - .help("signer for publicly observable extrinsic"), - ) - .arg( - Arg::with_name("direct") - .short("d") - .long("direct") - .global(true) - .help("insert if direct invocation call is desired"), - ) - .name("integritee-cli") - .version(VERSION) - .author("Integritee AG ") - .about("trusted calls to worker enclave") - .after_help("stf subcommands depend on the stf crate this has been built against") - }) - .add_cmd( - Command::new("new-account") - .description("generates a new incognito account for the given integritee shard") - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let store = LocalKeystore::open(get_keystore_path(matches), None).unwrap(); - let key: sr25519::AppPair = store.generate().unwrap(); - drop(store); - println!("{}", key.public().to_ss58check()); - Ok(()) - }), - ) - .add_cmd( - 
Command::new("list-accounts") - .description("lists all accounts in keystore for the integritee chain") - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let store = LocalKeystore::open(get_keystore_path(matches), None).unwrap(); - info!("sr25519 keys:"); - for pubkey in store.public_keys::().unwrap().into_iter() { - println!("{}", pubkey.to_ss58check()); - } - info!("ed25519 keys:"); - for pubkey in store.public_keys::().unwrap().into_iter() { - println!("{}", pubkey.to_ss58check()); - } - drop(store); - Ok(()) - }), - ) - .add_cmd( - Command::new("transfer") - .description("send funds from one incognito account to another") - .options(|app| { - app.setting(AppSettings::ColoredHelp) - .arg( - Arg::with_name("from") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("sender's AccountId in ss58check format"), - ) - .arg( - Arg::with_name("to") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("recipient's AccountId in ss58check format"), - ) - .arg( - Arg::with_name("amount") - .takes_value(true) - .required(true) - .value_name("U128") - .help("amount to be transferred"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let arg_from = matches.value_of("from").unwrap(); - let arg_to = matches.value_of("to").unwrap(); - let amount = matches - .value_of("amount") - .unwrap() - .parse() - .expect("amount can be converted to u128"); - let from = get_pair_from_str(matches, arg_from); - let to = get_accountid_from_str(arg_to); - let direct: bool = matches.is_present("direct"); - info!("from ss58 is {}", from.public().to_ss58check()); - info!("to ss58 is {}", to.to_ss58check()); - - println!( - "send trusted call transfer from {} to {}: {}", - from.public(), - to, - amount - ); - let (mrenclave, shard) = get_identifiers(matches); - let nonce = get_layer_two_nonce!(from, matches); - let top: TrustedOperation = - TrustedCall::balance_transfer(from.public().into(), to, amount) - .sign(&KeyPair::Sr25519(from), nonce, &mrenclave, &shard) - .into_trusted_operation(direct); - let _ = perform_operation(matches, &top); - Ok(()) - }), - ) - .add_cmd( - Command::new("set-balance") - .description("ROOT call to set some account balance to an arbitrary number") - .options(|app| { - app.arg( - Arg::with_name("account") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("sender's AccountId in ss58check format"), - ) - .arg( - Arg::with_name("amount") - .takes_value(true) - .required(true) - .value_name("U128") - .help("amount to be transferred"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let arg_who = matches.value_of("account").unwrap(); - let amount = matches - .value_of("amount") - .unwrap() - .parse() - .expect("amount can be converted to u128"); - let who = get_pair_from_str(matches, arg_who); - let signer = get_pair_from_str(matches, "//Alice"); - let direct: bool = matches.is_present("direct"); - info!("account ss58 is {}", who.public().to_ss58check()); - - println!("send trusted call set-balance({}, {})", who.public(), amount); - - let (mrenclave, shard) = get_identifiers(matches); - let nonce = get_layer_two_nonce!(signer, matches); - let top: TrustedOperation = TrustedCall::balance_set_balance( - signer.public().into(), - who.public().into(), - amount, - amount, - ) - .sign(&KeyPair::Sr25519(signer), nonce, &mrenclave, &shard) - .into_trusted_operation(direct); - let _ = perform_operation(matches, &top); - Ok(()) - }), - ) - .add_cmd( - Command::new("balance") - .description("query balance for 
incognito account in keystore") - .options(|app| { - app.arg( - Arg::with_name("accountid") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("AccountId in ss58check format"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let arg_who = matches.value_of("accountid").unwrap(); - debug!("arg_who = {:?}", arg_who); - let who = get_pair_from_str(matches, arg_who); - let top: TrustedOperation = TrustedGetter::free_balance(who.public().into()) - .sign(&KeyPair::Sr25519(who)) - .into(); - let res = perform_operation(matches, &top); - debug!("received result for balance"); - let bal = if let Some(v) = res { - if let Ok(vd) = crate::Balance::decode(&mut v.as_slice()) { - vd - } else { - info!("could not decode value. maybe hasn't been set? {:x?}", v); - 0 - } - } else { - 0 - }; - println!("{}", bal); - Ok(()) - }), - ) - .add_cmd( - Command::new("unshield-funds") - .description("Transfer funds from an incognito account to an on-chain account") - .options(|app| { - app.arg( - Arg::with_name("from") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("Sender's incognito AccountId in ss58check format"), - ) - .arg( - Arg::with_name("to") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("Recipient's on-chain AccountId in ss58check format"), - ) - .arg( - Arg::with_name("amount") - .takes_value(true) - .required(true) - .value_name("U128") - .help("Amount to be transferred"), - ) - .arg( - Arg::with_name("shard") - .takes_value(true) - .required(true) - .value_name("STRING") - .help("Shard identifier"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let arg_from = matches.value_of("from").unwrap(); - let arg_to = matches.value_of("to").unwrap(); - let amount = matches - .value_of("amount") - .unwrap() - .parse() - .expect("amount can be converted to u128"); - let from = get_pair_from_str(matches, arg_from); - let to = get_accountid_from_str(arg_to); - let direct: bool = matches.is_present("direct"); - println!("from ss58 is {}", from.public().to_ss58check()); - println!("to ss58 is {}", to.to_ss58check()); - - println!( - "send trusted call unshield_funds from {} to {}: {}", - from.public(), - to, - amount - ); - - let (mrenclave, shard) = get_identifiers(matches); - let nonce = get_layer_two_nonce!(from, matches); - let top: TrustedOperation = - TrustedCall::balance_unshield(from.public().into(), to, amount, shard) - .sign(&KeyPair::Sr25519(from), nonce, &mrenclave, &shard) - .into_trusted_operation(direct); - let _ = perform_operation(matches, &top); - Ok(()) - }), - ) - .add_cmd( - Command::new("play-turn") - .description("Player turn of connect four") - .options(|app| { - app.setting(AppSettings::ColoredHelp) - .arg( - Arg::with_name("player") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("player's AccountId in ss58check format"), - ) - .arg( - Arg::with_name("column") - .takes_value(true) - .required(true) - .value_name("u8") - .help("play stone in column, must be in the range of 1 to 7"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let arg_player = matches.value_of("player").unwrap(); - let column = matches - .value_of("column") - .unwrap() - .parse() - .expect("amount can be converted to u8"); - - if !(1..=7).contains(&column) { - panic!("Game only allows columns in the range of 1 to 7"); - } - - let player = get_pair_from_str(matches, arg_player); - let direct: bool = matches.is_present("direct"); - - info!("player ss58 is {}", 
player.public().to_ss58check()); - info!("column choice is {:?}", column); - - println!( - "send trusted call play-turn from {} with column {:?}", - player.public(), - column - ); - let (mrenclave, shard) = get_identifiers(matches); - // get nonce - let top: TrustedOperation = TrustedGetter::nonce(player.public().into()) - .sign(&KeyPair::Sr25519(player.clone())) - .into(); - let res = perform_operation(matches, &top); - let nonce: Index = if let Some(n) = res { - if let Ok(nonce) = Index::decode(&mut n.as_slice()) { - nonce - } else { - info!("could not decode value. maybe hasn't been set? {:x?}", n); - 0 - } - } else { - 0 - }; - debug!("got nonce: {:?}", nonce); - let top: TrustedOperation = TrustedCall::connectfour_play_turn( - sr25519_core::Public::from(player.public()).into(), - column, - ) - .sign(&KeyPair::Sr25519(player), nonce, &mrenclave, &shard) - .into_trusted_operation(direct); - let _ = perform_operation(matches, &top); - Ok(()) - }), - ) - .add_cmd( - Command::new("get-board") - .description("query board state for account in keystore") - .options(|app| { - app.arg( - Arg::with_name("accountid") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("AccountId in ss58check format"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let arg_who = matches.value_of("accountid").unwrap(); - debug!("arg_who = {:?}", arg_who); - let who = get_pair_from_str(matches, arg_who); - let key_pair = sr25519_core::Pair::from(who.clone()); - let top: TrustedOperation = - TrustedGetter::board(sr25519_core::Public::from(who.public()).into()) - .sign(&KeyPair::Sr25519(key_pair)) - .into(); - let res = perform_operation(matches, &top); - debug!("received result for board"); - if let Some(v) = res { - if let Ok(board) = crate::SgxBoardStruct::decode(&mut v.as_slice()) { - println!("Last turn in block number: {}", board.last_turn); - println!("Next player: {}", board.next_player); - println!("Board state: {:?}", board.board_state); - println!("Board:"); - for row in 0..6 { - for column in 0..7 { - print!(" {} ", board.board[column][row]); - } - println!() - } - println!("====================="); - for column in 0..7 { - print!(" {} ", column); - } - println!(); - } else { - println!("could not decode board. maybe hasn't been set? 
{:x?}", v); - } - } else { - println!("could not fetch board"); - }; - - Ok(()) - }), - ) - .into_cmd("trusted") -} - -fn get_keystore_path(matches: &ArgMatches<'_>) -> PathBuf { - let (_mrenclave, shard) = get_identifiers(matches); - PathBuf::from(&format!("{}/{}", KEYSTORE_PATH, shard.encode().to_base58())) -} - -pub fn get_identifiers(matches: &ArgMatches<'_>) -> ([u8; 32], ShardIdentifier) { - let mut mrenclave = [0u8; 32]; - assert!(matches.is_present("mrenclave"), "--mrenclave must be provided"); - mrenclave.copy_from_slice( - &matches - .value_of("mrenclave") - .unwrap() - .from_base58() - .expect("mrenclave has to be base58 encoded"), - ); - let shard = match matches.value_of("shard") { - Some(val) => - ShardIdentifier::from_slice(&val.from_base58().expect("shard has to be base58 encoded")), - None => ShardIdentifier::from_slice(&mrenclave), - }; - (mrenclave, shard) -} - -// TODO this function is redundant with client::main -fn get_accountid_from_str(account: &str) -> AccountId { - match &account[..2] { - "//" => sr25519::Pair::from_string(account, None) - .unwrap() - .public() - .into_account() - .into(), - _ => sr25519::Public::from_ss58check(account).unwrap().into_account().into(), - } -} - -// TODO this function is ALMOST redundant with client::main -// get a pair either form keyring (well known keys) or from the store -fn get_pair_from_str(matches: &ArgMatches<'_>, account: &str) -> sr25519_core::Pair { - info!("getting pair for {}", account); - match &account[..2] { - "//" => sr25519_core::Pair::from_string(account, None).unwrap(), - _ => { - info!("fetching from keystore at {}", &KEYSTORE_PATH); - // open store without password protection - let store = - LocalKeystore::open(get_keystore_path(matches), None).expect("store should exist"); - info!("store opened"); - let _pair = store - .key_pair::( - &sr25519::Public::from_ss58check(account).unwrap().into(), - ) - .unwrap() - .unwrap(); - info!("key pair fetched"); - drop(store); - _pair.into() - }, - } -} diff --git a/app-libs/stf/src/helpers.rs b/app-libs/stf/src/helpers.rs index 3ce5970193..9c84317014 100644 --- a/app-libs/stf/src/helpers.rs +++ b/app-libs/stf/src/helpers.rs @@ -15,15 +15,19 @@ */ use crate::{ - stf_sgx_primitives::types::*, AccountId, Hash, Index, SgxBoardStruct, StfError, StfResult, H256, + stf_sgx_primitives::types::BlockNumber, AccountId, Hash, Index, SgxBoardStruct, StfError, + StfResult, H256, }; use codec::{Decode, Encode}; use itp_storage::{storage_double_map_key, storage_map_key, storage_value_key, StorageHasher}; use log::*; -use sgx_runtime::BlockNumber; -use sgx_tstd as std; use std::prelude::v1::*; +#[cfg(feature = "sgx")] +use crate::stf_sgx_primitives::types::{AccountData, AccountInfo}; +#[cfg(feature = "std")] +use itp_types::{AccountData, AccountInfo}; + pub fn get_storage_value( storage_prefix: &'static str, storage_key_name: &'static str, diff --git a/app-libs/stf/src/lib.rs b/app-libs/stf/src/lib.rs index 92e4886a06..7209769881 100644 --- a/app-libs/stf/src/lib.rs +++ b/app-libs/stf/src/lib.rs @@ -29,9 +29,9 @@ extern crate sgx_tstd as std; extern crate alloc; #[cfg(feature = "std")] -use my_node_runtime::Balance; -#[cfg(feature = "std")] -pub use my_node_runtime::Index; +pub use my_node_runtime::{Balance, Index}; +#[cfg(feature = "sgx")] +pub use sgx_runtime::{Balance, Index}; use codec::{Compact, Decode, Encode}; use derive_more::Display; @@ -98,17 +98,11 @@ impl From for KeyPair { } pub mod hash; +pub mod helpers; pub mod stf_sgx_primitives; #[cfg(feature = "sgx")] pub mod stf_sgx; - 
-#[cfg(feature = "sgx")] -pub mod helpers; - -#[cfg(feature = "std")] -pub mod cli; - #[cfg(all(feature = "test", feature = "sgx"))] pub mod test_genesis; diff --git a/app-libs/stf/src/stf_sgx_primitives.rs b/app-libs/stf/src/stf_sgx_primitives.rs index cac9235f28..a7a7bf3f9c 100644 --- a/app-libs/stf/src/stf_sgx_primitives.rs +++ b/app-libs/stf/src/stf_sgx_primitives.rs @@ -15,26 +15,26 @@ */ +use super::{Balance, Index}; use codec::{Decode, Encode}; use itp_types::H256; +#[cfg(all(not(feature = "sgx"), feature = "std"))] +use sp_runtime::traits::BlakeTwo256; pub mod types { - #[cfg(feature = "sgx")] - pub use sgx_runtime::{Balance, Index}; - #[cfg(all(not(feature = "sgx"), feature = "std"))] - use sp_runtime::{generic, traits::BlakeTwo256}; + use super::*; - #[cfg(feature = "sgx")] pub type AccountData = balances::AccountData; - #[cfg(feature = "sgx")] pub type AccountInfo = system::AccountInfo; // FIXME after fixing sgx-runtime issue #37 #[cfg(all(not(feature = "std"), feature = "sgx"))] pub type ParentchainHeader = sgx_runtime::Header; #[cfg(all(not(feature = "sgx"), feature = "std"))] pub type BlockNumber = u32; + #[cfg(all(not(feature = "std"), feature = "sgx"))] + pub type BlockNumber = sgx_runtime::BlockNumber; #[cfg(all(not(feature = "sgx"), feature = "std"))] - pub type ParentchainHeader = generic::Header; + pub type ParentchainHeader = sp_runtime::generic::Header; pub type StateType = sgx_externalities::SgxExternalitiesType; pub type State = sgx_externalities::SgxExternalities; diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 3a293b91f3..c469a016d1 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -12,32 +12,31 @@ json = "0.12.0" substrate-bip39 = "0.4.2" tiny-bip39 = "0.6.2" serde_json = "1.0" -clap = "2.33" -clap-nested = "0.3.1" -primitive-types = { version = "0.10.1", default-features = false, features = ["codec"] } +clap = { version = "3.1.6", features = ["derive"]} +primitive-types = { version = "0.11.1", default-features = false, features = ["codec"] } base58 = "0.1" chrono = "*" blake2-rfc = { version = "0.2.18", default-features = false} geojson = "0.17" ws = { version = "0.9.1", features = ["ssl"] } serde = { version = "1.0", features = ["derive"] } -codec = { version = "2.0.0", package = "parity-scale-codec", features = ["derive"] } +codec = { version = "3.0.0", package = "parity-scale-codec", features = ["derive"] } sgx_crypto_helper = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } # scs / integritee -substrate-api-client = { features = ["ws-client"], git = "https://github.com/scs/substrate-api-client", branch = "master" } -substrate-client-keystore = { git = "https://github.com/scs/substrate-api-client", branch = "master" } -my-node-runtime = { package = "ajuna-runtime", git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } +substrate-api-client = { features = ["ws-client"], git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } +substrate-client-keystore = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } +my-node-runtime = { git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup", package = "ajuna-solo-runtime" } teerex-primitives = { git = "https://github.com/integritee-network/pallets.git", branch = "master" } # substrate dependencies -sp-runtime = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sc-keystore = { version = "4.0.0-dev", git = 
"https://github.com/paritytech/substrate.git", branch = "master" } -pallet-balances = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -frame-system = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-application-crypto = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-runtime = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sc-keystore = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +pallet-balances = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +frame-system = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-keyring = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-application-crypto = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } #local dependencies itp-types = { path = "../core-primitives/types" } diff --git a/cli/README.md b/cli/README.md index 19ca4a9a7b..f2bf98121c 100644 --- a/cli/README.md +++ b/cli/README.md @@ -8,13 +8,20 @@ Includes ## examples ``` -> integritee-cli new-account -> integritee-cli 127.0.0.1 transfer 5GpuFm6t1AU9xpTAnQnHXakTGA9rSHz8xNkEvx7RVQz2BVpd 5FkGDttiYa9ZoDAuNxzwEdLzkgt6ngWykSBhobGvoFUcUo8B 12345 -> integritee-cli 127.0.0.1:9979 list-workers +> ./integritee-cli transfer //Bob //Alice 12345 +> ./integritee-cli -u ws://127.0.0.1 list-workers number of workers registered: 1 Enclave 1 - AccountId: 5DvVAZAWnFS6ufCteSbuh46miVUCQH5oZ231SXHQGswCdGx9 - MRENCLAVE: HvKRosdfbbLayao3rAq4xmN2fnxBVX79DfDdeJ9YcTo5 - RA timestamp: 2020-02-22 06:32:37 UTC - URL: 127.0.0.1:2000 + AccountId: 5HN8RGEiJuc9iNA3vfiYj7Lk6ULWzBZXvSDheohBu3usSUqn + MRENCLAVE: 4GMb72Acyg8hnnnGEJ89jZK5zxNC4LvSe2ME96wLRV6J + RA timestamp: 2022-03-16 10:43:12.001 UTC + URL: wss://127.0.0.1:2345 +> ./integritee-cli -P 2345 trusted --direct --mrenclave 4GMb72Acyg8hnnn +GE4LvSe2ME96wLRV6J unshield-funds //Bob //Alice 12345 +from ss58 is 5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty +to ss58 is 5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY +send trusted call unshield_funds from 5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty to 5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY: 12345 +Trusted call 0x69ddfd1698bd2d629180c2dca34ce7add087526c51f43cf68245241b3f13154e is Submitted +Trusted call 0x69ddfd1698bd2d629180c2dca34ce7add087526c51f43cf68245241b3f13154e is Invalid + ``` diff --git a/cli/demo_connect_four.sh b/cli/demo_connect_four.sh old mode 100644 new mode 100755 index 65dfa24d4c..a94e83bf34 --- a/cli/demo_connect_four.sh +++ b/cli/demo_connect_four.sh @@ -1,29 +1,30 @@ #!/bin/bash # setup: -# build ajuna node with skip-ias-check -# cargo build --release --features skip-ias-check +# build ajuna node with skip-ias-check on branch "validateer-setup" +# cargo build 
--release --features solo,skip-ias-check # # run ajuna node -# target/release/ajuna --dev --tmp --ws-port --port 30385 --rpc-port +# ./target/release/ajuna-solo --dev --tmp --ws-port # -# run worker +# run worker inside the bin folder: # rm light_client_db.bin # rm -r shards # rm -r sidechain_db # export RUST_LOG=integritee_service=info,ita_stf=debug -# integritee-service init_shard -# integritee-service shielding-key -# integritee-service signing-key -# integritee-service -P -p -r 3485 run --dev --skip-ra +# ./integritee-service init-shard +# ./integritee-service shielding-key +# ./integritee-service signing-key +# ./integritee-service -P -p -r run --dev --skip-ra # # then run this script # usage: # export RUST_LOG=integritee-cli=info,ita_stf=info -# demo_connect_four.sh -p -P -m +# ./demo_connect_four.sh -p -P -m file # -# if -m is set, the mrenclave will be read from file +# if -m file is set, the mrenclave will be read from file ~/mrenclave.b58 + while getopts ":m:p:P:" opt; do case $opt in @@ -73,12 +74,12 @@ echo " Bob's account = ${ACCOUNTBOB}" echo "" echo "* Issue ${BALANCE} tokens to Alice's account" -${CLIENT} trusted set-balance ${ACCOUNTALICE} ${BALANCE} --mrenclave=${MRENCLAVE} --direct +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct set-balance ${ACCOUNTALICE} ${BALANCE} echo "" sleep 1 echo "* Issue ${BALANCE} tokens to Bob's account" -${CLIENT} trusted set-balance ${ACCOUNTBOB} ${BALANCE} --mrenclave=${MRENCLAVE} --direct +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct set-balance ${ACCOUNTBOB} ${BALANCE} echo "" sleep 1 @@ -96,22 +97,22 @@ echo "waiting" sleep 45 echo "Turn for Alice (Player 1)" -${CLIENT} trusted play-turn ${ACCOUNTALICE} 3 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTALICE} 3 echo "" sleep 1 echo "Turn for Bob (Player 2)" -${CLIENT} trusted play-turn ${ACCOUNTBOB} 4 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTBOB} 4 echo "" sleep 1 echo "Turn for Alice (Player 1)" -${CLIENT} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 echo "Turn for Bob (Player 2)" -${CLIENT} trusted play-turn ${ACCOUNTBOB} 3 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTBOB} 3 echo "" sleep 1 @@ -119,28 +120,27 @@ echo "waiting" sleep 5 echo "Board after 2 turns" -${CLIENT} trusted get-board ${ACCOUNTBOB} --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} get-board ${ACCOUNTBOB} echo "" sleep 1 echo "Turn for Alice (Player 1)" -${CLIENT} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 echo "Turn for Bob (Player 2)" -${CLIENT} trusted play-turn ${ACCOUNTBOB} 5 --direct --mrenclave=${MRENCLAVE} -echo "" +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTBOB} 5 sleep 1 echo "Turn for Alice (Player 1)" -${CLIENT} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 echo "Turn for Bob (Player 2)" -${CLIENT} trusted play-turn ${ACCOUNTBOB} 1 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTBOB} 1 echo "" sleep 1 @@ -148,12 +148,12 @@ echo "waiting" 
sleep 5 echo "Board after 4 turns" -${CLIENT} trusted get-board ${ACCOUNTBOB} --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} get-board ${ACCOUNTBOB} echo "" sleep 1 echo "Turn for Alice (Player 1)" -${CLIENT} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 @@ -161,8 +161,5 @@ echo "waiting" sleep 5 echo "Board after end of game" -${CLIENT} trusted get-board ${ACCOUNTBOB} --direct --mrenclave=${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} get-board ${ACCOUNTBOB} echo "" - - - diff --git a/cli/demo_connect_four_two_workers.sh b/cli/demo_connect_four_two_workers.sh index 977b3d453b..1834234b05 100644 --- a/cli/demo_connect_four_two_workers.sh +++ b/cli/demo_connect_four_two_workers.sh @@ -1,29 +1,29 @@ #!/bin/bash # setup: -# build ajuna node with skip-ias-check -# cargo build --release --features skip-ias-check +# build ajuna node with skip-ias-check on branch "validateer-setup" +# cargo build --release --features solo,skip-ias-check # # run ajuna node -# target/release/ajuna --dev --tmp --ws-port --port 30385 --rpc-port +# ./target/release/ajuna-solo --dev --tmp --ws-port # -# run worker +# run worker inside the bin folder: # rm light_client_db.bin # rm -r shards # rm -r sidechain_db # export RUST_LOG=integritee_service=info,ita_stf=debug -# integritee-service init_shard -# integritee-service shielding-key -# integritee-service signing-key -# integritee-service -P -p -r 3485 run --dev --skip-ra +# ./integritee-service init-shard +# ./integritee-service shielding-key +# ./integritee-service signing-key +# ./integritee-service -P -p -r run --dev --skip-ra # # then run this script # usage: # export RUST_LOG=integritee-cli=info,ita_stf=info -# demo_connect_four.sh -p -A -B -m +# demo_connect_four.sh -p -A -B -m file # -# if -m is set, the mrenclave will be read from file +# if -m file is set, the mrenclave will be read from file ~/mrenclave.b58 while getopts ":m:p:A:B:" opt; do case $opt in @@ -79,12 +79,12 @@ echo " Bob's account = ${ACCOUNTBOB}" echo "" echo "* Issue ${BALANCE} tokens to Alice's account via Worker 1" -${CLIENTWORKER1} trusted set-balance ${ACCOUNTALICE} ${BALANCE} --mrenclave=${MRENCLAVE} --direct +${CLIENTWORKER1} trusted --mrenclave=${MRENCLAVE} --direct set-balance ${ACCOUNTALICE} ${BALANCE} echo "" sleep 1 echo "* Issue ${BALANCE} tokens to Bob's account via Worker 2" -${CLIENTWORKER2} trusted set-balance ${ACCOUNTBOB} ${BALANCE} --mrenclave=${MRENCLAVE} --direct +${CLIENTWORKER2} trusted --mrenclave=${MRENCLAVE} --direct set-balance ${ACCOUNTBOB} ${BALANCE} echo "" sleep 1 @@ -102,22 +102,21 @@ echo "waiting" sleep 45 echo "Turn for Alice (Player 1 via Worker 1)" -${CLIENTWORKER1} trusted play-turn ${ACCOUNTALICE} 3 --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER1} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTALICE} 3 echo "" sleep 1 echo "Turn for Bob (Player 2 via Worker 2)" -${CLIENTWORKER2} trusted play-turn ${ACCOUNTBOB} 4 --direct --mrenclave=${MRENCLAVE} -echo "" +${CLIENTWORKER2} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTBOB} 4 sleep 1 echo "Turn for Alice (Player 1 via Worker 1)" -${CLIENTWORKER1} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER1} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 echo "Turn for Bob (Player 2 via Worker 2)" -${CLIENTWORKER2} trusted play-turn ${ACCOUNTBOB} 3 
--direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER2} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTBOB} 3 echo "" sleep 1 @@ -125,28 +124,27 @@ echo "waiting" sleep 5 echo "Board after 2 turns (queried by Bob via Worker 2)" -${CLIENTWORKER2} trusted get-board ${ACCOUNTBOB} --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER2} trusted --direct --mrenclave=${MRENCLAVE} get-board ${ACCOUNTBOB} echo "" sleep 1 echo "Turn for Alice (Player 1 via Worker 1)" -${CLIENTWORKER1} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} -echo "" +${CLIENTWORKER1} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTALICE} 2 sleep 1 echo "Turn for Bob (Player 2 via Worker 2)" -${CLIENTWORKER2} trusted play-turn ${ACCOUNTBOB} 5 --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER2} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTBOB} 5 echo "" sleep 1 echo "Turn for Alice (Player 1 via Worker 1)" -${CLIENTWORKER1} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER1} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 echo "Turn for Bob (Player 2 via Worker 2)" -${CLIENTWORKER2} trusted play-turn ${ACCOUNTBOB} 1 --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER2} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTBOB} 1 echo "" sleep 1 @@ -154,12 +152,12 @@ echo "waiting" sleep 5 echo "Board after 4 turns (queried by Alice via Worker 1)" -${CLIENTWORKER1} trusted get-board ${ACCOUNTALICE} --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER1} trusted --direct --mrenclave=${MRENCLAVE} get-board ${ACCOUNTALICE} echo "" sleep 1 echo "Turn for Alice (Player 1 via Worker 1)" -${CLIENTWORKER1} trusted play-turn ${ACCOUNTALICE} 2 --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER1} trusted --direct --mrenclave=${MRENCLAVE} play-turn ${ACCOUNTALICE} 2 echo "" sleep 1 @@ -167,8 +165,5 @@ echo "waiting" sleep 5 echo "Board after end of game (queried by Alice via Worker 1)" -${CLIENTWORKER2} trusted get-board ${ACCOUNTBOB} --direct --mrenclave=${MRENCLAVE} +${CLIENTWORKER2} trusted --direct --mrenclave=${MRENCLAVE} get-board ${ACCOUNTBOB} echo "" - - - diff --git a/cli/demo_direct_call.sh b/cli/demo_direct_call.sh index b2e828129a..3e3ad5b631 100755 --- a/cli/demo_direct_call.sh +++ b/cli/demo_direct_call.sh @@ -72,29 +72,25 @@ echo " Bob's incognito account = ${ICGACCOUNTBOB}" echo "" echo "* Issue ${AMOUNTSHIELD} tokens to Alice's incognito account" -${CLIENT} trusted set-balance ${ICGACCOUNTALICE} ${AMOUNTSHIELD} --mrenclave ${MRENCLAVE} --direct +${CLIENT} trusted --mrenclave ${MRENCLAVE} --direct set-balance ${ICGACCOUNTALICE} ${AMOUNTSHIELD} echo "" echo "Get balance of Alice's incognito account" -${CLIENT} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} echo "" #send funds from Alice to bobs account echo "* Send ${AMOUNTTRANSFER} funds from Alice's incognito account to Bob's incognito account" -$CLIENT trusted transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} --mrenclave ${MRENCLAVE} --direct -echo "" - -echo "* Waiting 3 seconds" -sleep 3 +$CLIENT trusted --mrenclave ${MRENCLAVE} --direct transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} echo "" echo "* Get balance of Alice's incognito account" -RESULT=$(${CLIENT} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} | xargs) +RESULT=$(${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} | xargs) echo $RESULT 
echo "" echo "* Bob's incognito account balance" -RESULT=$(${CLIENT} trusted balance ${ICGACCOUNTBOB} --mrenclave ${MRENCLAVE} | xargs) +RESULT=$(${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTBOB} | xargs) echo $RESULT echo "" diff --git a/cli/demo_private_tx.sh b/cli/demo_private_tx.sh index e0b2f6f1e8..9685084c12 100755 --- a/cli/demo_private_tx.sh +++ b/cli/demo_private_tx.sh @@ -32,19 +32,19 @@ read MRENCLAVE <<< $(cat ~/mrenclave.b58) # only for initial setup (actually should be done in genesis) # pre-fund //AliceIncognito, our ROOT key echo "issue funds on first (sender) account:" -$CLIENT trusted set-balance //AliceIncognito 123456789 --mrenclave $MRENCLAVE +$CLIENT trusted --mrenclave $MRENCLAVE set-balance //AliceIncognito 123456789 echo -n "get balance: " -$CLIENT trusted balance //AliceIncognito --mrenclave $MRENCLAVE +$CLIENT trusted --mrenclave $MRENCLAVE balance //AliceIncognito # create incognito account for default shard (= MRENCLAVE) -account1p=$($CLIENT trusted new-account --mrenclave $MRENCLAVE) +account1p=$($CLIENT trusted --mrenclave $MRENCLAVE new-account) echo "created new incognito account: $account1p" #send 10M funds from AliceIncognito to new account -$CLIENT trusted transfer //AliceIncognito $account1p 23456789 --mrenclave $MRENCLAVE +$CLIENT trusted --mrenclave $MRENCLAVE transfer //AliceIncognito $account1p 23456789 echo -n "receiver balance: " -$CLIENT trusted balance $account1p --mrenclave $MRENCLAVE +$CLIENT trusted --mrenclave $MRENCLAVE balance $account1p echo -n "sender balance: " -$CLIENT trusted balance //AliceIncognito --mrenclave $MRENCLAVE +$CLIENT trusted --mrenclave $MRENCLAVE balance //AliceIncognito diff --git a/cli/demo_shielding_unshielding.sh b/cli/demo_shielding_unshielding.sh index 5266f6a106..a429951eb2 100755 --- a/cli/demo_shielding_unshielding.sh +++ b/cli/demo_shielding_unshielding.sh @@ -81,7 +81,7 @@ echo " Alice's incognito account = ${ICGACCOUNTALICE}" echo "" echo "* Create a new incognito account for Bob" -ICGACCOUNTBOB=$(${CLIENT} trusted new-account --mrenclave ${MRENCLAVE}) +ICGACCOUNTBOB=$(${CLIENT} trusted --mrenclave ${MRENCLAVE} new-account ) echo " Bob's incognito account = ${ICGACCOUNTBOB}" echo "" @@ -97,7 +97,7 @@ sleep 10 echo "" echo "Get balance of Alice's incognito account" -${CLIENT} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} echo "" echo "* Get balance of Alice's on-chain account" @@ -105,19 +105,19 @@ ${CLIENT} balance "//Alice" echo "" echo "* Send ${AMOUNTTRANSFER} funds from Alice's incognito account to Bob's incognito account" -$CLIENT trusted transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} --mrenclave ${MRENCLAVE} +$CLIENT trusted --mrenclave ${MRENCLAVE} transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} echo "" echo "* Get balance of Alice's incognito account" -${CLIENT} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} echo "" echo "* Bob's incognito account balance" -${CLIENT} trusted balance ${ICGACCOUNTBOB} --mrenclave ${MRENCLAVE} +${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTBOB} echo "" echo "* Un-shield ${AMOUNTUNSHIELD} tokens from Alice's incognito account" -${CLIENT} trusted unshield-funds ${ICGACCOUNTALICE} //Alice ${AMOUNTUNSHIELD} ${MRENCLAVE} --mrenclave ${MRENCLAVE} --xt-signer //Alice +${CLIENT} trusted --mrenclave ${MRENCLAVE} --xt-signer //Alice unshield-funds 
${ICGACCOUNTALICE} //Alice ${AMOUNTUNSHIELD} echo "" echo "* Waiting 10 seconds" @@ -125,7 +125,7 @@ sleep 10 echo "" echo "Get balance of Alice's incognito account" -RESULT=$(${CLIENT} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} | xargs) +RESULT=$(${CLIENT} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} | xargs) echo $RESULT echo "* Get balance of Alice's on-chain account" diff --git a/cli/demo_sidechain.sh b/cli/demo_sidechain.sh index bd0e7e585d..36bc079aa8 100755 --- a/cli/demo_sidechain.sh +++ b/cli/demo_sidechain.sh @@ -78,42 +78,30 @@ echo " Bob's incognito account = ${ICGACCOUNTBOB}" echo "" echo "* Issue ${INITIALFUNDS} tokens to Alice's incognito account (on worker 1)" -${CLIENTWORKER1} trusted set-balance ${ICGACCOUNTALICE} ${INITIALFUNDS} --mrenclave ${MRENCLAVE} --direct -echo "" - -echo "* Waiting 2 seconds" -sleep 2 +${CLIENTWORKER1} trusted --mrenclave ${MRENCLAVE} --direct set-balance ${ICGACCOUNTALICE} ${INITIALFUNDS} echo "" echo "Get balance of Alice's incognito account (on worker 1)" -${CLIENTWORKER1} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} +${CLIENTWORKER1} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} echo "" # Send funds from Alice to Bobs account, on worker 1 echo "* First transfer: Send ${AMOUNTTRANSFER} funds from Alice's incognito account to Bob's incognito account (on worker 1)" -$CLIENTWORKER1 trusted transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} --mrenclave ${MRENCLAVE} --direct -echo "" - -echo "* Waiting 2 seconds" -sleep 2 +$CLIENTWORKER1 trusted --mrenclave ${MRENCLAVE} --direct transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} echo "" # Send funds from Alice to Bobs account, on worker 2 echo "* Second transfer: Send ${AMOUNTTRANSFER} funds from Alice's incognito account to Bob's incognito account (on worker 2)" -$CLIENTWORKER2 trusted transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} --mrenclave ${MRENCLAVE} --direct -echo "" - -echo "* Waiting 2 seconds" -sleep 2 +$CLIENTWORKER2 trusted --mrenclave ${MRENCLAVE} --direct transfer ${ICGACCOUNTALICE} ${ICGACCOUNTBOB} ${AMOUNTTRANSFER} echo "" echo "* Get balance of Alice's incognito account (on worker 2)" -ALICE_BALANCE=$(${CLIENTWORKER2} trusted balance ${ICGACCOUNTALICE} --mrenclave ${MRENCLAVE} | xargs) +ALICE_BALANCE=$(${CLIENTWORKER2} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTALICE} | xargs) echo "$ALICE_BALANCE" echo "" echo "* Get balance of Bob's incognito account (on worker 2)" -BOB_BALANCE=$(${CLIENTWORKER2} trusted balance ${ICGACCOUNTBOB} --mrenclave ${MRENCLAVE} | xargs) +BOB_BALANCE=$(${CLIENTWORKER2} trusted --mrenclave ${MRENCLAVE} balance ${ICGACCOUNTBOB} | xargs) echo "$BOB_BALANCE" echo "" @@ -139,5 +127,3 @@ fi echo "" exit 0 - - diff --git a/cli/src/command_utils.rs b/cli/src/command_utils.rs new file mode 100644 index 0000000000..15bfbc846a --- /dev/null +++ b/cli/src/command_utils.rs @@ -0,0 +1,91 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::Cli; +use codec::Encode; +use itc_rpc_client::direct_client::{DirectApi, DirectClient as DirectWorkerApi}; +use log::*; +use my_node_runtime::{AccountId, Signature}; +use sgx_crypto_helper::rsa3072::Rsa3072PubKey; +use sp_application_crypto::sr25519; +use sp_core::{crypto::Ss58Codec, Pair}; +use sp_runtime::traits::{IdentifyAccount, Verify}; +use std::path::PathBuf; +use substrate_api_client::{rpc::WsRpcClient, Api}; +use substrate_client_keystore::LocalKeystore; + +type AccountPublic = <Signature as Verify>::Signer; +pub(crate) const KEYSTORE_PATH: &str = "my_keystore"; + +pub(crate) fn encode_encrypt<E: Encode>( + cli: &Cli, + to_encrypt: E, +) -> Result<(Vec<u8>, Vec<u8>), String> { + let worker_api_direct = get_worker_api_direct(cli); + let shielding_pubkey: Rsa3072PubKey = match worker_api_direct.get_rsa_pubkey() { + Ok(key) => key, + Err(err_msg) => return Err(err_msg.to_string()), + }; + + let encoded = to_encrypt.encode(); + let mut encrypted: Vec<u8> = Vec::new(); + shielding_pubkey.encrypt_buffer(&encoded, &mut encrypted).unwrap(); + Ok((encoded, encrypted)) +} + +pub(crate) fn get_chain_api(cli: &Cli) -> Api<sr25519::Pair, WsRpcClient> { + let url = format!("{}:{}", cli.node_url, cli.node_port); + info!("connecting to {}", url); + Api::<sr25519::Pair, WsRpcClient>::new(WsRpcClient::new(&url)).unwrap() +} + +pub(crate) fn get_accountid_from_str(account: &str) -> AccountId { + match &account[..2] { + "//" => AccountPublic::from(sr25519::Pair::from_string(account, None).unwrap().public()) + .into_account(), + _ => AccountPublic::from(sr25519::Public::from_ss58check(account).unwrap()).into_account(), + } +} + +pub(crate) fn get_worker_api_direct(cli: &Cli) -> DirectWorkerApi { + let url = format!("{}:{}", cli.worker_url, cli.trusted_worker_port); + info!("Connecting to integritee-service-direct-port on '{}'", url); + DirectWorkerApi::new(url) +} + +/// get a pair either from keyring (well known keys) or from the store +pub(crate) fn get_pair_from_str(account: &str) -> sr25519::AppPair { + info!("getting pair for {}", account); + match &account[..2] { + "//" => sr25519::AppPair::from_string(account, None).unwrap(), + _ => { + info!("fetching from keystore at {}", &KEYSTORE_PATH); + // open store without password protection + let store = LocalKeystore::open(PathBuf::from(&KEYSTORE_PATH), None) + .expect("store should exist"); + info!("store opened"); + let _pair = store + .key_pair::<sr25519::AppPair>( + &sr25519::Public::from_ss58check(account).unwrap().into(), + ) + .unwrap() + .unwrap(); + drop(store); + _pair + }, + } +} diff --git a/cli/src/commands.rs b/cli/src/commands.rs new file mode 100644 index 0000000000..6aff7f8df3 --- /dev/null +++ b/cli/src/commands.rs @@ -0,0 +1,392 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ +*/ + +extern crate chrono; +use crate::{command_utils::*, trusted_commands, trusted_commands::TrustedArgs, Cli}; +use base58::{FromBase58, ToBase58}; +use chrono::{DateTime, Utc}; +use clap::Subcommand; +use codec::{Decode, Encode}; +use ita_stf::ShardIdentifier; +use itc_rpc_client::direct_client::DirectApi; +use itp_node_api_extensions::{PalletTeerexApi, TEEREX}; +use itp_registry_storage::REGISTRY; +use log::*; +use my_node_runtime::{Balance, BalancesCall, Call, Event, Hash}; +use sp_application_crypto::{ed25519, sr25519}; +use sp_core::{crypto::Ss58Codec, sr25519 as sr25519_core, Pair, H256}; +use sp_keyring::AccountKeyring; +use std::{ + path::PathBuf, + sync::mpsc::channel, + time::{Duration, UNIX_EPOCH}, +}; +use substrate_api_client::{ + compose_extrinsic, compose_extrinsic_offline, utils::FromHexString, GenericAddress, Metadata, + UncheckedExtrinsicV4, XtStatus, +}; +use substrate_client_keystore::{KeystoreExt, LocalKeystore}; + +const PREFUNDING_AMOUNT: u128 = 1_000_000_000; + +#[derive(Subcommand)] +pub enum Commands { + /// query parentchain balance for AccountId + Balance { + /// AccountId in ss58check format + account: String, + }, + + /// generates a new account for the integritee chain in your local keystore + NewAccount, + + /// lists all accounts in your local keystore for the integritee chain + ListAccounts, + + /// query node metadata and print it as json to stdout + PrintMetadata, + + /// query sgx-runtime metadata and print it as json to stdout + PrintSgxMetadata, + + /// send some bootstrapping funds to supplied account(s) + Faucet { + /// Account(s) to be funded, ss58check encoded + #[clap(min_values = 1, required = true)] + accounts: Vec, + }, + + /// transfer funds from one parentchain account to another + Transfer { + /// sender's AccountId in ss58check format + from: String, + + /// recipient's AccountId in ss58check format + to: String, + + /// amount to be transferred + amount: Balance, + }, + + /// query enclave registry and list all workers + ListWorkers, + + /// listen to parentchain events + Listen { + /// exit after given number of parentchain events + #[clap(short, long = "exit-after")] + events: Option, + + /// exit after given number of blocks + #[clap(short, long = "await-blocks")] + blocks: Option, + }, + + /// Transfer funds from an parentchain account to an incognito account + ShieldFunds { + /// Sender's parentchain AccountId in ss58check format + from: String, + + /// Recipient's incognito AccountId in ss58check format + to: String, + + /// Amount to be transferred + amount: Balance, + + /// Shard identifier + shard: String, + }, + + /// Sign up for a game queue, ready to be matched. 
+ QueueGame { + /// sender's AccountId in ss58check format + who: String, + }, + + /// trusted calls to worker enclave + #[clap(after_help = "stf subcommands depend on the stf crate this has been built against")] + Trusted(TrustedArgs), +} + +pub fn match_command(cli: &Cli) { + match &cli.command { + Commands::Balance { account } => balance(cli, account), + Commands::NewAccount => new_account(), + Commands::ListAccounts => list_accounts(), + Commands::PrintMetadata => print_metadata(cli), + Commands::PrintSgxMetadata => print_sgx_metadata(cli), + Commands::Faucet { accounts } => faucet(cli, accounts), + Commands::Transfer { from, to, amount } => transfer(cli, from, to, amount), + Commands::ListWorkers => list_workers(cli), + Commands::Listen { events, blocks } => listen(cli, events, blocks), + Commands::ShieldFunds { from, to, amount, shard } => + shield_funds(cli, from, to, amount, shard), + Commands::QueueGame { who } => queue_game(cli, who), + Commands::Trusted(trusted) => trusted_commands::match_trusted_commands(cli, trusted), + }; +} + +fn balance(cli: &Cli, account: &str) { + let api = get_chain_api(cli); + let accountid = get_accountid_from_str(account); + let balance = + if let Some(data) = api.get_account_data(&accountid).unwrap() { data.free } else { 0 }; + println!("{}", balance); +} + +fn new_account() { + let store = LocalKeystore::open(PathBuf::from(&KEYSTORE_PATH), None).unwrap(); + let key: sr25519::AppPair = store.generate().unwrap(); + drop(store); + println!("{}", key.public().to_ss58check()); +} + +fn list_accounts() { + let store = LocalKeystore::open(PathBuf::from(&KEYSTORE_PATH), None).unwrap(); + println!("sr25519 keys:"); + for pubkey in store.public_keys::().unwrap().into_iter() { + println!("{}", pubkey.to_ss58check()); + } + println!("ed25519 keys:"); + for pubkey in store.public_keys::().unwrap().into_iter() { + println!("{}", pubkey.to_ss58check()); + } + drop(store); +} + +fn print_metadata(cli: &Cli) { + let meta = get_chain_api(cli).get_metadata().unwrap(); + println!("Metadata:\n {}", Metadata::pretty_format(&meta).unwrap()); +} + +fn print_sgx_metadata(cli: &Cli) { + let worker_api_direct = get_worker_api_direct(cli); + let metadata = worker_api_direct.get_state_metadata().unwrap(); + println!("Metadata:\n {}", Metadata::pretty_format(&metadata).unwrap()); +} + +fn faucet(cli: &Cli, accounts: &[String]) { + let api = get_chain_api(cli).set_signer(AccountKeyring::Alice.pair()); + let mut nonce = api.get_nonce().unwrap(); + for account in accounts { + let to = get_accountid_from_str(account); + #[allow(clippy::redundant_clone)] + let xt: UncheckedExtrinsicV4<_> = compose_extrinsic_offline!( + api.clone().signer.unwrap(), + Call::Balances(BalancesCall::transfer { + dest: GenericAddress::Id(to.clone()), + value: PREFUNDING_AMOUNT + }), + nonce, + Era::Immortal, + api.genesis_hash, + api.genesis_hash, + api.runtime_version.spec_version, + api.runtime_version.transaction_version + ); + // send and watch extrinsic until finalized + println!("Faucet drips to {} (Alice's nonce={})", to, nonce); + let _blockh = api.send_extrinsic(xt.hex_encode(), XtStatus::Ready).unwrap(); + nonce += 1; + } +} + +fn transfer(cli: &Cli, from: &str, to: &str, amount: &Balance) { + let from_account = get_pair_from_str(from); + let to_account = get_accountid_from_str(to); + info!("from ss58 is {}", from_account.public().to_ss58check()); + info!("to ss58 is {}", to_account.to_ss58check()); + let api = get_chain_api(cli).set_signer(sr25519_core::Pair::from(from_account)); + let xt = 
api.balance_transfer(GenericAddress::Id(to_account.clone()), *amount); + let tx_hash = api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap(); + println!("[+] TrustedOperation got finalized. Hash: {:?}\n", tx_hash); + let result = api.get_account_data(&to_account).unwrap().unwrap(); + println!("balance for {} is now {}", to_account, result.free); +} + +fn list_workers(cli: &Cli) { + let api = get_chain_api(cli); + let wcount = api.enclave_count(None).unwrap(); + println!("number of workers registered: {}", wcount); + for w in 1..=wcount { + let enclave = api.enclave(w, None).unwrap(); + if enclave.is_none() { + println!("error reading enclave data"); + continue + }; + let enclave = enclave.unwrap(); + let timestamp = + DateTime::::from(UNIX_EPOCH + Duration::from_millis(enclave.timestamp as u64)); + println!("Enclave {}", w); + println!(" AccountId: {}", enclave.pubkey.to_ss58check()); + println!(" MRENCLAVE: {}", enclave.mr_enclave.to_base58()); + println!(" RA timestamp: {}", timestamp); + println!(" URL: {}", enclave.url); + } +} + +fn listen(cli: &Cli, events_arg: &Option, blocks_arg: &Option) { + println!("{:?} {:?}", events_arg, blocks_arg); + let api = get_chain_api(cli); + info!("Subscribing to events"); + let (events_in, events_out) = channel(); + let mut count = 0u32; + let mut blocks = 0u32; + api.subscribe_events(events_in).unwrap(); + loop { + if let Some(e) = events_arg { + if count >= *e { + return + } + }; + if let Some(b) = blocks_arg { + if blocks >= *b { + return + } + }; + let event_str = events_out.recv().unwrap(); + let _unhex = Vec::from_hex(event_str).unwrap(); + let mut _er_enc = _unhex.as_slice(); + let _events = Vec::>::decode(&mut _er_enc); + blocks += 1; + match _events { + Ok(evts) => + for evr in &evts { + println!("decoded: phase {:?} event {:?}", evr.phase, evr.event); + match &evr.event { + Event::Balances(be) => { + println!(">>>>>>>>>> balances event: {:?}", be); + match &be { + pallet_balances::Event::Transfer { from, to, amount } => { + println!("From: {:?}", from); + println!("To: {:?}", to); + println!("Value: {:?}", amount); + }, + _ => { + debug!("ignoring unsupported balances event"); + }, + } + }, + Event::Teerex(ee) => { + println!(">>>>>>>>>> integritee event: {:?}", ee); + count += 1; + match &ee { + my_node_runtime::pallet_teerex::Event::AddedEnclave( + accountid, + url, + ) => { + println!( + "AddedEnclave: {:?} at url {}", + accountid, + String::from_utf8(url.to_vec()) + .unwrap_or_else(|_| "error".to_string()) + ); + }, + my_node_runtime::pallet_teerex::Event::RemovedEnclave( + accountid, + ) => { + println!("RemovedEnclave: {:?}", accountid); + }, + my_node_runtime::pallet_teerex::Event::Forwarded(shard) => { + println!( + "Forwarded request for shard {}", + shard.encode().to_base58() + ); + }, + my_node_runtime::pallet_teerex::Event::ProcessedParentchainBlock( + accountid, + block_hash, + merkle_root, + ) => { + println!( + "ProcessedParentchainBlock from {} with hash {:?} and merkle root {:?}", + accountid, block_hash, merkle_root + ); + }, + my_node_runtime::pallet_teerex::Event::ProposedSidechainBlock( + accountid, + block_hash, + ) => { + println!( + "ProposedSidechainBlock from {} with hash {:?}", + accountid, block_hash + ); + }, + my_node_runtime::pallet_teerex::Event::ShieldFunds( + incognito_account, + ) => { + println!("ShieldFunds for {:?}", incognito_account); + }, + my_node_runtime::pallet_teerex::Event::UnshieldedFunds( + public_account, + ) => { + println!("UnshieldFunds for {:?}", public_account); + }, + _ => 
debug!("ignoring unsupported teerex event: {:?}", ee), + } + }, + _ => debug!("ignoring unsupported module event: {:?}", evr.event), + } + }, + Err(_) => error!("couldn't decode event record list"), + } + } +} + +fn shield_funds(cli: &Cli, arg_from: &str, arg_to: &str, amount: &Balance, shard: &str) { + let chain_api = get_chain_api(cli); + + let shard_opt = match shard.from_base58() { + Ok(s) => ShardIdentifier::decode(&mut &s[..]), + _ => panic!("shard argument must be base58 encoded"), + }; + + let shard = match shard_opt { + Ok(shard) => shard, + Err(e) => panic!("{}", e), + }; + + // get the sender + let from = get_pair_from_str(arg_from); + let chain_api = chain_api.set_signer(sr25519_core::Pair::from(from)); + + // get the recipient + let to = get_accountid_from_str(arg_to); + let (_to_encoded, to_encrypted) = match encode_encrypt(cli, to) { + Ok((encoded, encrypted)) => (encoded, encrypted), + Err(e) => panic!("{}", e), + }; + // compose the extrinsic + let xt: UncheckedExtrinsicV4<([u8; 2], Vec, u128, H256)> = + compose_extrinsic!(chain_api, TEEREX, "shield_funds", to_encrypted, *amount, shard); + + let tx_hash = chain_api.send_extrinsic(xt.hex_encode(), XtStatus::Finalized).unwrap(); + println!("[+] TrustedOperation got finalized. Hash: {:?}\n", tx_hash); +} + +fn queue_game(cli: &Cli, who: &str) { + let account = get_pair_from_str(who); + info!("Queueing player: {}", account.public().to_ss58check()); + + let chain_api = get_chain_api(cli).set_signer(sr25519_core::Pair::from(account)); + + // compose the extrinsic + let xt: UncheckedExtrinsicV4<([u8; 2])> = compose_extrinsic!(chain_api, REGISTRY, "queue"); + + let tx_hash = chain_api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap(); + println!("[+] Successfully registered player in game queue. Extrinsic Hash: {:?}\n", tx_hash); +} diff --git a/cli/src/main.rs b/cli/src/main.rs index ccedb16074..ef5d71aed4 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,16 +1,19 @@ -// Copyright (c) 2019 Alain Brenzikofer -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ //! an RPC client to Integritee using websockets //! 
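The next hunk replaces the old clap_nested `Commander` tree in cli/src/main.rs with clap 3's derive API: global connection options live on a `Parser` struct and the commands move into a `Subcommand` enum that is dispatched in `commands::match_command`. Below is a minimal, self-contained sketch of that pattern, assuming clap 3 with the `derive` feature enabled; the names `DemoCli` and `DemoCommands` are illustrative only and not types from this crate.
```
use clap::{Parser, Subcommand};

/// Sketch of a derive-based CLI: global options on the struct,
/// commands in a separate enum (mirrors the layout of the new integritee-cli).
#[derive(Parser)]
#[clap(name = "demo-cli", version, about = "illustrative sketch only")]
struct DemoCli {
    /// node url
    #[clap(short = 'u', long, default_value_t = String::from("ws://127.0.0.1"))]
    node_url: String,

    /// node port
    #[clap(short = 'p', long, default_value_t = String::from("9944"))]
    node_port: String,

    #[clap(subcommand)]
    command: DemoCommands,
}

#[derive(Subcommand)]
enum DemoCommands {
    /// query parentchain balance for AccountId
    Balance {
        /// AccountId in ss58check format
        account: String,
    },
}

fn main() {
    let cli = DemoCli::parse();
    // dispatch on the parsed subcommand, as commands::match_command does in the real CLI
    match cli.command {
        DemoCommands::Balance { account } => println!("would query balance of {}", account),
    }
}
```
With this layout, an invocation such as `demo-cli -u ws://127.0.0.1 balance //Alice` parses the global options first and then the subcommand; the same rule is why the demo scripts now place `--mrenclave` and `--direct` between `trusted` and its nested subcommand instead of appending them at the end.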
@@ -20,873 +23,52 @@ #![feature(rustc_private)] #[macro_use] extern crate clap; +extern crate chrono; extern crate env_logger; extern crate log; -extern crate chrono; -use chrono::{DateTime, Utc}; -use std::time::{Duration, UNIX_EPOCH}; - -use sgx_crypto_helper::rsa3072::Rsa3072PubKey; - -use sp_application_crypto::{ed25519, sr25519}; -use sp_keyring::AccountKeyring; -use std::path::PathBuf; +mod command_utils; +mod commands; +mod trusted_command_utils; +mod trusted_commands; +mod trusted_operation; -use base58::{FromBase58, ToBase58}; +use crate::commands::Commands; +use clap::Parser; -use clap::{AppSettings, Arg, ArgMatches}; -use clap_nested::{Command, Commander}; -use codec::{Decode, Encode}; -use log::*; -use my_node_runtime::{AccountId, BalancesCall, Call, Event, Hash, Signature}; -use sp_core::{crypto::Ss58Codec, sr25519 as sr25519_core, Pair, H256}; -use sp_runtime::{ - traits::{IdentifyAccount, Verify}, - MultiSignature, -}; -use std::{result::Result as StdResult, sync::mpsc::channel, thread}; -use substrate_api_client::{ - compose_extrinsic, compose_extrinsic_offline, - rpc::{ws_client::Subscriber, WsRpcClient}, - utils::FromHexString, - Api, GenericAddress, Metadata, RpcClient, UncheckedExtrinsicV4, XtStatus, -}; -use teerex_primitives::Request; - -use ita_stf::{ShardIdentifier, TrustedCallSigned, TrustedOperation}; -use itc_rpc_client::direct_client::{DirectApi, DirectClient as DirectWorkerApi}; -use itp_node_api_extensions::{PalletTeerexApi, TEEREX}; -use itp_registry_storage::REGISTRY; -use itp_types::{DirectRequestStatus, RpcRequest, RpcResponse, RpcReturnValue}; -use substrate_client_keystore::{KeystoreExt, LocalKeystore}; - -type AccountPublic = ::Signer; -const KEYSTORE_PATH: &str = "my_keystore"; -const PREFUNDING_AMOUNT: u128 = 1_000_000_000; const VERSION: &str = env!("CARGO_PKG_VERSION"); -fn main() { - env_logger::init(); - - let res = Commander::new() - .options(|app| { - app.setting(AppSettings::ColoredHelp) - .arg( - Arg::with_name("node-url") - .short("u") - .long("node-url") - .global(true) - .takes_value(true) - .value_name("STRING") - .default_value("ws://127.0.0.1") - .help("node url"), - ) - .arg( - Arg::with_name("node-port") - .short("p") - .long("node-port") - .global(true) - .takes_value(true) - .value_name("STRING") - .default_value("9944") - .help("node port"), - ) - .arg( - Arg::with_name("worker-url") - .short("U") - .long("worker-url") - .global(true) - .takes_value(true) - .value_name("STRING") - .default_value("wss://127.0.0.1") - .help("worker url"), - ) - .arg( - Arg::with_name("trusted-worker-port") - .short("P") - .long("trusted-worker-port") - .global(true) - .takes_value(true) - .value_name("STRING") - .default_value("2000") - .help("worker direct invocation port"), - ) - .name("integritee-cli") - .version(VERSION) - .author("Integritee AG ") - .about("interact with integritee-node and workers") - .after_help("stf subcommands depend on the stf crate this has been built against") - }) - .args(|_args, matches| matches.value_of("environment").unwrap_or("dev")) - .add_cmd( - Command::new("new-account") - .description("generates a new account for the integritee chain") - .runner(|_args: &str, _matches: &ArgMatches<'_>| { - let store = LocalKeystore::open(PathBuf::from(&KEYSTORE_PATH), None).unwrap(); - let key: sr25519::AppPair = store.generate().unwrap(); - drop(store); - println!("{}", key.public().to_ss58check()); - Ok(()) - }), - ) - .add_cmd( - Command::new("list-accounts") - .description("lists all accounts in keystore for the integritee 
chain") - .runner(|_args: &str, _matches: &ArgMatches<'_>| { - let store = LocalKeystore::open(PathBuf::from(&KEYSTORE_PATH), None).unwrap(); - println!("sr25519 keys:"); - for pubkey in store.public_keys::().unwrap().into_iter() { - println!("{}", pubkey.to_ss58check()); - } - println!("ed25519 keys:"); - for pubkey in store.public_keys::().unwrap().into_iter() { - println!("{}", pubkey.to_ss58check()); - } - drop(store); - Ok(()) - }), - ) - .add_cmd( - Command::new("print-metadata") - .description("query node metadata and print it as json to stdout") - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let meta = get_chain_api(matches).get_metadata().unwrap(); - println!("Metadata:\n {}", Metadata::pretty_format(&meta).unwrap()); - Ok(()) - }), - ) - .add_cmd( - Command::new("print-sgx-metadata") - .description("query sgx-runtime metadata and print it as json to stdout") - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let worker_api_direct = get_worker_api_direct(matches); - let metadata = worker_api_direct.get_state_metadata().unwrap(); - println!("Metadata:\n {}", Metadata::pretty_format(&metadata).unwrap()); - Ok(()) - }), - ) - .add_cmd( - Command::new("print-sgx-metadata") - .description("query sgx-runtime metadata and print it as json to stdout") - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let worker_api_direct = get_worker_api_direct(matches); - let metadata = worker_api_direct.get_state_metadata().unwrap(); - println!("Metadata:\n {}", Metadata::pretty_format(&metadata).unwrap()); - Ok(()) - }), - ) - .add_cmd( - Command::new("faucet") - .description("send some bootstrapping funds to supplied account(s)") - .options(|app| { - app.setting(AppSettings::ColoredHelp).arg( - Arg::with_name("accounts") - .takes_value(true) - .required(true) - .value_name("ACCOUNT") - .multiple(true) - .min_values(1) - .help("Account(s) to be funded, ss58check encoded"), - ) - }) - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let api = get_chain_api(matches); - let _api = api.set_signer(AccountKeyring::Alice.pair()); - let accounts = matches.values_of("accounts").unwrap(); - - let mut nonce = _api.get_nonce().unwrap(); - for account in accounts { - let to = get_accountid_from_str(account); - #[allow(clippy::redundant_clone)] - let xt: UncheckedExtrinsicV4<_> = compose_extrinsic_offline!( - _api.clone().signer.unwrap(), - Call::Balances(BalancesCall::transfer { - dest: GenericAddress::Id(to.clone()), - value: PREFUNDING_AMOUNT - }), - nonce, - Era::Immortal, - _api.genesis_hash, - _api.genesis_hash, - _api.runtime_version.spec_version, - _api.runtime_version.transaction_version - ); - // send and watch extrinsic until finalized - println!("Faucet drips to {} (Alice's nonce={})", to, nonce); - let _blockh = - _api.send_extrinsic(xt.hex_encode(), XtStatus::Ready).unwrap(); - nonce += 1; - } - Ok(()) - }), - ) - .add_cmd( - Command::new("balance") - .description("query on-chain balance for AccountId") - .options(|app| { - app.setting(AppSettings::ColoredHelp).arg( - Arg::with_name("AccountId") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("AccountId in ss58check format"), - ) - }) - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let api = get_chain_api(matches); - let account = matches.value_of("AccountId").unwrap(); - let accountid = get_accountid_from_str(account); - let balance = if let Some(data) = api.get_account_data(&accountid).unwrap() { - data.free - } else { - 0 - }; - println!("{}", balance); - Ok(()) - }), - ) - .add_cmd( - 
Command::new("transfer") - .description("transfer funds from one on-chain account to another") - .options(|app| { - app.setting(AppSettings::ColoredHelp) - .arg( - Arg::with_name("from") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("sender's AccountId in ss58check format"), - ) - .arg( - Arg::with_name("to") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("recipient's AccountId in ss58check format"), - ) - .arg( - Arg::with_name("amount") - .takes_value(true) - .required(true) - .value_name("U128") - .help("amount to be transferred"), - ) - }) - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let api = get_chain_api(matches); - let arg_from = matches.value_of("from").unwrap(); - let arg_to = matches.value_of("to").unwrap(); - let amount = matches - .value_of("amount") - .unwrap() - .parse() - .expect("amount can be converted to u128"); - let from = get_pair_from_str(arg_from); - let to = get_accountid_from_str(arg_to); - info!("from ss58 is {}", from.public().to_ss58check()); - info!("to ss58 is {}", to.to_ss58check()); - let _api = api.set_signer(sr25519_core::Pair::from(from)); - let xt = _api.balance_transfer(GenericAddress::Id(to.clone()), amount); - let tx_hash = _api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap(); - println!("[+] TrustedOperation got finalized. Hash: {:?}\n", tx_hash); - let result = _api.get_account_data(&to).unwrap().unwrap(); - println!("balance for {} is now {}", to, result.free); - Ok(()) - }), - ) - .add_cmd( - Command::new("list-workers") - .description("query enclave registry and list all workers") - .runner(|_args: &str, matches: &ArgMatches<'_>| { - let api = get_chain_api(matches); - let wcount = api.enclave_count(None).unwrap(); - println!("number of workers registered: {}", wcount); - for w in 1..=wcount { - let enclave = api.enclave(w, None).unwrap(); - if enclave.is_none() { - println!("error reading enclave data"); - continue - }; - let enclave = enclave.unwrap(); - let timestamp = DateTime::::from( - UNIX_EPOCH + Duration::from_millis(enclave.timestamp as u64), - ); - println!("Enclave {}", w); - println!(" AccountId: {}", enclave.pubkey.to_ss58check()); - println!(" MRENCLAVE: {}", enclave.mr_enclave.to_base58()); - println!(" RA timestamp: {}", timestamp); - println!(" URL: {}", enclave.url); - } - Ok(()) - }), - ) - .add_cmd( - Command::new("listen") - .description("listen to on-chain events") - .options(|app| { - app.setting(AppSettings::ColoredHelp) - .arg( - Arg::with_name("events") - .short("e") - .long("exit-after") - .takes_value(true) - .help("exit after given number of Integritee events"), - ) - .arg( - Arg::with_name("blocks") - .short("b") - .long("await-blocks") - .takes_value(true) - .help("exit after given number of blocks"), - ) - }) - .runner(|_args: &str, matches: &ArgMatches<'_>| { - listen(matches); - Ok(()) - }), - ) - .add_cmd( - Command::new("shield-funds") - .description("Transfer funds from an on-chain account to an incognito account") - .options(|app| { - app.arg( - Arg::with_name("from") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("Sender's on-chain AccountId in ss58check format"), - ) - .arg( - Arg::with_name("to") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("Recipient's incognito AccountId in ss58check format"), - ) - .arg( - Arg::with_name("amount") - .takes_value(true) - .required(true) - .value_name("U128") - .help("Amount to be transferred"), - ) - .arg( - Arg::with_name("shard") - .takes_value(true) - 
.required(true) - .value_name("STRING") - .help("Shard identifier"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let chain_api = get_chain_api(matches); - let amount = matches - .value_of("amount") - .unwrap() - .parse() - .expect("amount can't be converted to u128"); - - let shard_opt = match matches.value_of("shard") { - Some(s) => match s.from_base58() { - Ok(s) => ShardIdentifier::decode(&mut &s[..]), - _ => panic!("shard argument must be base58 encoded"), - }, - _ => panic!( - "at least one of `mrenclave` or `shard` arguments must be supplied" - ), - }; - let shard = match shard_opt { - Ok(shard) => shard, - Err(e) => panic!("{}", e), - }; - - // get the sender - let arg_from = matches.value_of("from").unwrap(); - let from = get_pair_from_str(arg_from); - let chain_api = chain_api.set_signer(sr25519_core::Pair::from(from)); - - // get the recipient - let arg_to = matches.value_of("to").unwrap(); - let to = get_accountid_from_str(arg_to); - let (_to_encoded, to_encrypted) = match encode_encrypt(matches, to) { - Ok((encoded, encrypted)) => (encoded, encrypted), - Err(e) => panic!("{}", e), - }; - // compose the extrinsic - let xt: UncheckedExtrinsicV4<([u8; 2], Vec, u128, H256)> = compose_extrinsic!( - chain_api, - TEEREX, - "shield_funds", - to_encrypted, - amount, - shard - ); - - let tx_hash = - chain_api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap(); - println!("[+] TrustedOperation got finalized. Hash: {:?}\n", tx_hash); - Ok(()) - }), - ) - .add_cmd( - Command::new("queue-game") - .description("Sign up to a new game in the game registry") - .options(|app| { - app.arg( - Arg::with_name("who") - .takes_value(true) - .required(true) - .value_name("SS58") - .help("To be registered AccountId in ss58check format"), - ) - }) - .runner(move |_args: &str, matches: &ArgMatches<'_>| { - let chain_api = get_chain_api(matches); - - // get the sender - let arg_who = matches.value_of("who").unwrap(); - let who = get_pair_from_str(arg_who); - let chain_api = chain_api.set_signer(sr25519_core::Pair::from(who)); - - // compose the extrinsic - let xt: UncheckedExtrinsicV4<([u8; 2])> = - compose_extrinsic!(chain_api, REGISTRY, "queue"); - - let tx_hash = - chain_api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap(); - println!( - "[+] Successfully registered player in game queue. Extrinsic Hash: {:?}\n", - tx_hash - ); - Ok(()) - }), - ) - .add_cmd(ita_stf::cli::cmd(&perform_trusted_operation)) - .no_cmd(|_args, _matches| { - println!("No subcommand matched"); - Ok(()) - }) - .run(); - if let Err(e) = res { - println!("{}", e) - } -} - -fn get_chain_api(matches: &ArgMatches<'_>) -> Api { - let url = format!( - "{}:{}", - matches.value_of("node-url").unwrap(), - matches.value_of("node-port").unwrap() - ); - info!("connecting to {}", url); - Api::::new(WsRpcClient::new(&url)).unwrap() -} - -fn perform_trusted_operation(matches: &ArgMatches<'_>, top: &TrustedOperation) -> Option> { - match top { - TrustedOperation::indirect_call(call) => send_request(matches, call.clone()), - TrustedOperation::direct_call(call) => - send_direct_request(matches, TrustedOperation::direct_call(call.clone())), - TrustedOperation::get(getter) => get_state(matches, TrustedOperation::get(getter.clone())), - } -} - -fn get_state(matches: &ArgMatches<'_>, getter: TrustedOperation) -> Option> { - // TODO: ensure getter is signed? 
- let (_operation_call_encoded, operation_call_encrypted) = match encode_encrypt(matches, getter) - { - Ok((encoded, encrypted)) => (encoded, encrypted), - Err(msg) => { - println!("[Error] {}", msg); - return None - }, - }; - let shard = read_shard(matches).unwrap(); - - // compose jsonrpc call - let data = Request { shard, cyphertext: operation_call_encrypted }; - let rpc_method = "author_submitAndWatchExtrinsic".to_owned(); - let jsonrpc_call: String = RpcRequest::compose_jsonrpc_call(rpc_method, data.encode()); - - let direct_api = get_worker_api_direct(matches); - let (sender, receiver) = channel(); - direct_api.watch(jsonrpc_call, sender); - - loop { - match receiver.recv() { - Ok(response) => { - let response: RpcResponse = serde_json::from_str(&response).unwrap(); - if let Ok(return_value) = RpcReturnValue::decode(&mut response.result.as_slice()) { - if return_value.status == DirectRequestStatus::Error { - println!( - "[Error] {}", - String::decode(&mut return_value.value.as_slice()).unwrap() - ); - return None - } - if !return_value.do_watch { - return match Option::decode(&mut return_value.value.as_slice()) { - Ok(value_opt) => value_opt, - Err(_) => panic!("Error when decoding response"), - } - } - }; - }, - Err(_) => return None, - }; - } +#[derive(Parser)] +#[clap(name = "integritee-cli")] +#[clap(version = VERSION)] +#[clap(author = "Integritee AG ")] +#[clap(about = "interact with integritee-node and workers", long_about = None)] +#[clap(after_help = "stf subcommands depend on the stf crate this has been built against")] +pub struct Cli { + /// node url + #[clap(short = 'u', long, default_value_t = String::from("ws://127.0.0.1"))] + node_url: String, + + /// node port + #[clap(short = 'p', long, default_value_t = String::from("9944"))] + node_port: String, + + /// worker url + #[clap(short = 'U', long, default_value_t = String::from("wss://127.0.0.1"))] + worker_url: String, + + /// worker direct invocation port + #[clap(short = 'P', long, default_value_t = String::from("2000"))] + trusted_worker_port: String, + + #[clap(subcommand)] + command: Commands, } -fn encode_encrypt( - matches: &ArgMatches<'_>, - to_encrypt: E, -) -> Result<(Vec, Vec), String> { - let worker_api_direct = get_worker_api_direct(matches); - let shielding_pubkey: Rsa3072PubKey = match worker_api_direct.get_rsa_pubkey() { - Ok(key) => key, - Err(err_msg) => return Err(err_msg.to_string()), - }; - - let encoded = to_encrypt.encode(); - let mut encrypted: Vec = Vec::new(); - shielding_pubkey.encrypt_buffer(&encoded, &mut encrypted).unwrap(); - Ok((encoded, encrypted)) -} - -fn send_request(matches: &ArgMatches<'_>, call: TrustedCallSigned) -> Option> { - let chain_api = get_chain_api(matches); - let (_, call_encrypted) = match encode_encrypt(matches, call) { - Ok((encoded, encrypted)) => (encoded, encrypted), - Err(msg) => { - println!("[Error]: {}", msg); - return None - }, - }; - - let shard = read_shard(matches).unwrap(); - - let arg_signer = matches.value_of("xt-signer").unwrap(); - let signer = get_pair_from_str(arg_signer); - let _chain_api = chain_api.set_signer(sr25519_core::Pair::from(signer)); - - let request = Request { shard, cyphertext: call_encrypted }; - let xt = compose_extrinsic!(_chain_api, TEEREX, "call_worker", request); - - // send and watch extrinsic until block is executed - let block_hash = - _chain_api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap().unwrap(); - info!( - "Trusted call extrinsic sent and sucessfully included in parentchain block with hash {:?}.", - 
block_hash - ); - info!("Waiting for execution confirmation from enclave..."); - let (events_in, events_out) = channel(); - _chain_api.subscribe_events(events_in).unwrap(); - - loop { - let ret: ProcessedParentchainBlockArgs = _chain_api - .wait_for_event::( - TEEREX, - "ProcessedParentchainBlock", - None, - &events_out, - ) - .unwrap(); - info!("Confirmation of ProcessedParentchainBlock received"); - debug!("Expected block Hash: {:?}", block_hash); - debug!("Confirmed stf block Hash: {:?}", ret.block_hash); - if ret.block_hash == block_hash { - return Some(ret.block_hash.encode()) - } - } -} - -fn get_worker_api_direct(matches: &ArgMatches<'_>) -> DirectWorkerApi { - let url = format!( - "{}:{}", - matches.value_of("worker-url").unwrap(), - matches.value_of("trusted-worker-port").unwrap() - ); - info!("Connecting to integritee-service-direct-port on '{}'", url); - DirectWorkerApi::new(url) -} - -fn read_shard(matches: &ArgMatches<'_>) -> StdResult { - match matches.value_of("shard") { - Some(s) => match s.from_base58() { - Ok(s) => ShardIdentifier::decode(&mut &s[..]), - _ => panic!("shard argument must be base58 encoded"), - }, - None => match matches.value_of("mrenclave") { - Some(m) => match m.from_base58() { - Ok(s) => ShardIdentifier::decode(&mut &s[..]), - _ => panic!("mrenclave argument must be base58 encoded"), - }, - None => panic!("at least one of `mrenclave` or `shard` arguments must be supplied"), - }, - } -} -/// sends a rpc watch request to the worker api server -fn send_direct_request( - matches: &ArgMatches<'_>, - operation_call: TrustedOperation, -) -> Option> { - let (_operation_call_encoded, operation_call_encrypted) = - match encode_encrypt(matches, operation_call) { - Ok((encoded, encrypted)) => (encoded, encrypted), - Err(msg) => { - println!("[Error] {}", msg); - return None - }, - }; - let shard = read_shard(matches).unwrap(); - - // compose jsonrpc call - let data = Request { shard, cyphertext: operation_call_encrypted }; - let direct_invocation_call = RpcRequest { - jsonrpc: "2.0".to_owned(), - method: "author_submitAndWatchExtrinsic".to_owned(), - params: data.encode(), - id: 1, - }; - let jsonrpc_call: String = serde_json::to_string(&direct_invocation_call).unwrap(); - - debug!("get direct api"); - let direct_api = get_worker_api_direct(matches); - - debug!("setup sender and receiver"); - let (sender, receiver) = channel(); - direct_api.watch(jsonrpc_call, sender); - - debug!("waiting for rpc response"); - loop { - match receiver.recv() { - Ok(response) => { - debug!("received response"); - let response: RpcResponse = serde_json::from_str(&response).unwrap(); - if let Ok(return_value) = RpcReturnValue::decode(&mut response.result.as_slice()) { - debug!("successfully decoded rpc response"); - match return_value.status { - DirectRequestStatus::Error => { - debug!("request status is error"); - if let Ok(value) = String::decode(&mut return_value.value.as_slice()) { - println!("[Error] {}", value); - } - return None - }, - DirectRequestStatus::TrustedOperationStatus(status) => { - debug!("request status is: {:?}", status); - if let Ok(value) = Hash::decode(&mut return_value.value.as_slice()) { - println!("Trusted call {:?} is {:?}", value, status); - } - }, - _ => { - debug!("request status is ignored"); - return None - }, - } - if !return_value.do_watch { - debug!("do watch is false, closing connection"); - return None - } - }; - }, - Err(e) => { - error!("failed to receive rpc response: {:?}", e); - return None - }, - }; - } -} - -#[allow(dead_code)] 
-#[derive(Decode)] -struct ProcessedParentchainBlockArgs { - signer: AccountId, - block_hash: H256, - merkle_root: H256, -} - -fn listen(matches: &ArgMatches<'_>) { - let api = get_chain_api(matches); - info!("Subscribing to events"); - let (events_in, events_out) = channel(); - let mut count = 0u32; - let mut blocks = 0u32; - api.subscribe_events(events_in).unwrap(); - loop { - if matches.is_present("events") - && count >= value_t!(matches.value_of("events"), u32).unwrap() - { - return - }; - if matches.is_present("blocks") - && blocks > value_t!(matches.value_of("blocks"), u32).unwrap() - { - return - }; - let event_str = events_out.recv().unwrap(); - let _unhex = Vec::from_hex(event_str).unwrap(); - let mut _er_enc = _unhex.as_slice(); - let _events = Vec::>::decode(&mut _er_enc); - blocks += 1; - match _events { - Ok(evts) => - for evr in &evts { - println!("decoded: phase {:?} event {:?}", evr.phase, evr.event); - match &evr.event { - Event::Balances(be) => { - println!(">>>>>>>>>> balances event: {:?}", be); - match &be { - pallet_balances::Event::Transfer { from, to, amount } => { - println!("From: {:?}", from); - println!("To: {:?}", to); - println!("Value: {:?}", amount); - }, - _ => { - debug!("ignoring unsupported balances event"); - }, - } - }, - Event::Teerex(ee) => { - println!(">>>>>>>>>> integritee event: {:?}", ee); - count += 1; - match &ee { - my_node_runtime::pallet_teerex::Event::AddedEnclave( - accountid, - url, - ) => { - println!( - "AddedEnclave: {:?} at url {}", - accountid, - String::from_utf8(url.to_vec()) - .unwrap_or_else(|_| "error".to_string()) - ); - }, - my_node_runtime::pallet_teerex::Event::RemovedEnclave( - accountid, - ) => { - println!("RemovedEnclave: {:?}", accountid); - }, - my_node_runtime::pallet_teerex::Event::Forwarded(shard) => { - println!( - "Forwarded request for shard {}", - shard.encode().to_base58() - ); - }, - my_node_runtime::pallet_teerex::Event::ProcessedParentchainBlock( - accountid, - block_hash, - merkle_root, - ) => { - println!( - "ProcessedParentchainBlock from {} with hash {:?} and merkle root {:?}", - accountid, block_hash, merkle_root - ); - }, - my_node_runtime::pallet_teerex::Event::ProposedSidechainBlock( - accountid, - block_hash, - ) => { - println!( - "ProposedSidechainBlock from {} with hash {:?}", - accountid, block_hash - ); - }, - my_node_runtime::pallet_teerex::Event::ShieldFunds( - incognito_account, - ) => { - println!("ShieldFunds for {:?}", incognito_account); - }, - my_node_runtime::pallet_teerex::Event::UnshieldedFunds( - public_account, - ) => { - println!("UnshieldFunds for {:?}", public_account); - }, - _ => debug!("ignoring unsupported teerex event: {:?}", ee), - } - }, - _ => debug!("ignoring unsupported module event: {:?}", evr.event), - } - }, - Err(_) => error!("couldn't decode event record list"), - } - } -} - -// Subscribes to the pallet_teerex events of type ProcessedParentchainBlock. 
-pub fn subscribe_to_processed_parentchain_block( - api: Api, -) -> H256 -where - MultiSignature: From, - Client: RpcClient + Subscriber + Send, -{ - let (events_in, events_out) = channel(); - - let _eventsubscriber = thread::Builder::new() - .name("eventsubscriber".to_owned()) - .spawn(move || { - api.subscribe_events(events_in.clone()).unwrap(); - }) - .unwrap(); - - println!("waiting for confirmation event..."); - loop { - let event_str = events_out.recv().unwrap(); - - let _unhex = Vec::from_hex(event_str).unwrap(); - let mut _er_enc = _unhex.as_slice(); - let _events = Vec::>::decode(&mut _er_enc); - if let Ok(evts) = _events { - for evr in &evts { - info!("received event {:?}", evr.event); - if let Event::Teerex(pe) = &evr.event { - if let my_node_runtime::pallet_teerex::Event::ProcessedParentchainBlock( - sender, - block_hash, - _merkle_root, - ) = &pe - { - println!("[+] Received processed parentchain block event from {}", sender); - return block_hash.clone().to_owned() - } else { - debug!("received unknown event from Teerex: {:?}", evr.event) - } - } - } - } - } -} +fn main() { + env_logger::init(); -fn get_accountid_from_str(account: &str) -> AccountId { - match &account[..2] { - "//" => AccountPublic::from(sr25519::Pair::from_string(account, None).unwrap().public()) - .into_account(), - _ => AccountPublic::from(sr25519::Public::from_ss58check(account).unwrap()).into_account(), - } -} + let cli = Cli::parse(); -// get a pair either form keyring (well known keys) or from the store -fn get_pair_from_str(account: &str) -> sr25519::AppPair { - info!("getting pair for {}", account); - match &account[..2] { - "//" => sr25519::AppPair::from_string(account, None).unwrap(), - _ => { - info!("fetching from keystore at {}", &KEYSTORE_PATH); - // open store without password protection - let store = LocalKeystore::open(PathBuf::from(&KEYSTORE_PATH), None) - .expect("store should exist"); - info!("store opened"); - let _pair = store - .key_pair::( - &sr25519::Public::from_ss58check(account).unwrap().into(), - ) - .unwrap() - .unwrap(); - drop(store); - _pair - }, - } + commands::match_command(&cli); } diff --git a/cli/src/trusted_command_utils.rs b/cli/src/trusted_command_utils.rs new file mode 100644 index 0000000000..4b73ee1969 --- /dev/null +++ b/cli/src/trusted_command_utils.rs @@ -0,0 +1,87 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+
+*/
+
+use crate::trusted_commands::TrustedArgs;
+use base58::{FromBase58, ToBase58};
+use codec::Encode;
+use ita_stf::{AccountId, ShardIdentifier};
+use log::*;
+use sp_application_crypto::sr25519;
+use sp_core::{crypto::Ss58Codec, sr25519 as sr25519_core, Pair};
+use sp_runtime::traits::IdentifyAccount;
+use std::path::PathBuf;
+use substrate_client_keystore::LocalKeystore;
+
+const TRUSTED_KEYSTORE_PATH: &str = "my_trusted_keystore";
+
+pub(crate) fn get_keystore_path(trusted_args: &TrustedArgs) -> PathBuf {
+	let (_mrenclave, shard) = get_identifiers(trusted_args);
+	PathBuf::from(&format!("{}/{}", TRUSTED_KEYSTORE_PATH, shard.encode().to_base58()))
+}
+
+pub(crate) fn get_identifiers(trusted_args: &TrustedArgs) -> ([u8; 32], ShardIdentifier) {
+	let mut mrenclave = [0u8; 32];
+	mrenclave.copy_from_slice(
+		&trusted_args
+			.mrenclave
+			.from_base58()
+			.expect("mrenclave has to be base58 encoded"),
+	);
+	let shard = match &trusted_args.shard {
+		Some(val) =>
+			ShardIdentifier::from_slice(&val.from_base58().expect("shard has to be base58 encoded")),
+		None => ShardIdentifier::from_slice(&mrenclave),
+	};
+	(mrenclave, shard)
+}
+
+// TODO this function is redundant with client::main
+pub(crate) fn get_accountid_from_str(account: &str) -> AccountId {
+	match &account[..2] {
+		"//" => sr25519::Pair::from_string(account, None)
+			.unwrap()
+			.public()
+			.into_account()
+			.into(),
+		_ => sr25519::Public::from_ss58check(account).unwrap().into_account().into(),
+	}
+}
+
+// TODO this function is ALMOST redundant with client::main
+// get a pair either from keyring (well known keys) or from the store
+pub(crate) fn get_pair_from_str(trusted_args: &TrustedArgs, account: &str) -> sr25519_core::Pair {
+	info!("getting pair for {}", account);
+	match &account[..2] {
+		"//" => sr25519_core::Pair::from_string(account, None).unwrap(),
+		_ => {
+			info!("fetching from keystore at {}", &TRUSTED_KEYSTORE_PATH);
+			// open store without password protection
+			let store = LocalKeystore::open(get_keystore_path(trusted_args), None)
+				.expect("store should exist");
+			info!("store opened");
+			let _pair = store
+				.key_pair::<sr25519::AppPair>(
+					&sr25519::Public::from_ss58check(account).unwrap().into(),
+				)
+				.unwrap()
+				.unwrap();
+			info!("key pair fetched");
+			drop(store);
+			_pair.into()
+		},
+	}
+}
diff --git a/cli/src/trusted_commands.rs b/cli/src/trusted_commands.rs
new file mode 100644
index 0000000000..2b0ad97310
--- /dev/null
+++ b/cli/src/trusted_commands.rs
@@ -0,0 +1,316 @@
+/*
+	Copyright 2021 Integritee AG and Supercomputing Systems AG
+
+	Licensed under the Apache License, Version 2.0 (the "License");
+	you may not use this file except in compliance with the License.
+	You may obtain a copy of the License at
+
+		http://www.apache.org/licenses/LICENSE-2.0
+
+	Unless required by applicable law or agreed to in writing, software
+	distributed under the License is distributed on an "AS IS" BASIS,
+	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+	See the License for the specific language governing permissions and
+	limitations under the License.
+
+*/
+
+use crate::{
+	trusted_command_utils::{
+		get_accountid_from_str, get_identifiers, get_keystore_path, get_pair_from_str,
+	},
+	trusted_operation::perform_trusted_operation,
+	Cli,
+};
+use clap::{Args, Subcommand};
+use codec::Decode;
+use ita_stf::{Index, KeyPair, SgxBoardStruct, TrustedCall, TrustedGetter, TrustedOperation};
+use log::*;
+use my_node_runtime::Balance;
+use sp_application_crypto::{ed25519, sr25519};
+use sp_core::{crypto::Ss58Codec, sr25519 as sr25519_core, Pair};
+use substrate_client_keystore::{KeystoreExt, LocalKeystore};
+
+macro_rules! get_layer_two_nonce {
+	($signer_pair:ident, $cli: ident, $trusted_args:ident ) => {{
+		let top: TrustedOperation =
+			TrustedGetter::nonce(sr25519_core::Public::from($signer_pair.public()).into())
+				.sign(&KeyPair::Sr25519($signer_pair.clone()))
+				.into();
+		let res = perform_operation($cli, $trusted_args, &top);
+		let nonce: Index = if let Some(n) = res {
+			if let Ok(nonce) = Index::decode(&mut n.as_slice()) {
+				nonce
+			} else {
+				0
+			}
+		} else {
+			0
+		};
+		debug!("got layer two nonce: {:?}", nonce);
+		nonce
+	}};
+}
+
+#[derive(Args)]
+pub struct TrustedArgs {
+	/// targeted worker MRENCLAVE
+	#[clap(short, long)]
+	pub(crate) mrenclave: String,
+
+	/// shard identifier
+	#[clap(short, long)]
+	pub(crate) shard: Option<String>,
+
+	/// signer for publicly observable extrinsic
+	#[clap(short='a', long, default_value_t = String::from("//Alice"))]
+	pub(crate) xt_signer: String,
+
+	/// insert if direct invocation call is desired
+	#[clap(short, long)]
+	direct: bool,
+
+	#[clap(subcommand)]
+	command: TrustedCommands,
+}
+
+#[derive(Subcommand)]
+pub enum TrustedCommands {
+	/// generates a new incognito account for the given shard
+	NewAccount,
+
+	/// lists all incognito accounts in a given shard
+	ListAccounts,
+
+	/// send funds from one incognito account to another
+	Transfer {
+		/// sender's AccountId in ss58check format
+		from: String,
+
+		/// recipient's AccountId in ss58check format
+		to: String,
+
+		/// amount to be transferred
+		amount: Balance,
+	},
+
+	/// ROOT call to set some account balance to an arbitrary number
+	SetBalance {
+		/// sender's AccountId in ss58check format
+		account: String,
+
+		/// amount to be transferred
+		amount: Balance,
+	},
+
+	/// query balance for incognito account in keystore
+	Balance {
+		/// AccountId in ss58check format
+		account: String,
+	},
+
+	/// Transfer funds from an incognito account to a parentchain account
+	UnshieldFunds {
+		/// Sender's incognito AccountId in ss58check format
+		from: String,
+
+		/// Recipient's parentchain AccountId in ss58check format
+		to: String,
+
+		/// amount to be transferred
+		amount: Balance,
+	},
+
+	/// Play a turn on connect four.
+	PlayTurn {
+		/// Player's incognito AccountId in ss58check format.
+		player: String,
+
+		/// Column to be played.
+		column: u8,
+	},
+
+	/// Gets the board state the given player is playing on.
+	GetBoard {
+		/// Player's incognito AccountId in ss58check format.
+ player: String, + }, +} + +pub fn match_trusted_commands(cli: &Cli, trusted_args: &TrustedArgs) { + match &trusted_args.command { + TrustedCommands::NewAccount => new_account(trusted_args), + TrustedCommands::ListAccounts => list_accounts(trusted_args), + TrustedCommands::Transfer { from, to, amount } => + transfer(cli, trusted_args, from, to, amount), + TrustedCommands::SetBalance { account, amount } => + set_balance(cli, trusted_args, account, amount), + TrustedCommands::Balance { account } => balance(cli, trusted_args, account), + TrustedCommands::UnshieldFunds { from, to, amount } => + unshield_funds(cli, trusted_args, from, to, amount), + TrustedCommands::PlayTurn { player, column } => + play_turn(cli, trusted_args, player, *column), + TrustedCommands::GetBoard { player } => get_board(cli, trusted_args, player), + } +} + +fn perform_operation( + cli: &Cli, + trusted_args: &TrustedArgs, + top: &TrustedOperation, +) -> Option> { + perform_trusted_operation(cli, trusted_args, top) +} + +fn new_account(trusted_args: &TrustedArgs) { + let store = LocalKeystore::open(get_keystore_path(trusted_args), None).unwrap(); + let key: sr25519::AppPair = store.generate().unwrap(); + drop(store); + println!("{}", key.public().to_ss58check()); +} + +fn list_accounts(trusted_args: &TrustedArgs) { + let store = LocalKeystore::open(get_keystore_path(trusted_args), None).unwrap(); + info!("sr25519 keys:"); + for pubkey in store.public_keys::().unwrap().into_iter() { + println!("{}", pubkey.to_ss58check()); + } + info!("ed25519 keys:"); + for pubkey in store.public_keys::().unwrap().into_iter() { + println!("{}", pubkey.to_ss58check()); + } + drop(store); +} + +fn transfer(cli: &Cli, trusted_args: &TrustedArgs, arg_from: &str, arg_to: &str, amount: &Balance) { + let from = get_pair_from_str(trusted_args, arg_from); + let to = get_accountid_from_str(arg_to); + info!("from ss58 is {}", from.public().to_ss58check()); + info!("to ss58 is {}", to.to_ss58check()); + + println!("send trusted call transfer from {} to {}: {}", from.public(), to, amount); + let (mrenclave, shard) = get_identifiers(trusted_args); + let nonce = get_layer_two_nonce!(from, cli, trusted_args); + let top: TrustedOperation = TrustedCall::balance_transfer(from.public().into(), to, *amount) + .sign(&KeyPair::Sr25519(from), nonce, &mrenclave, &shard) + .into_trusted_operation(trusted_args.direct); + let _ = perform_operation(cli, trusted_args, &top); +} + +fn set_balance(cli: &Cli, trusted_args: &TrustedArgs, arg_who: &str, amount: &Balance) { + let who = get_pair_from_str(trusted_args, arg_who); + let signer = get_pair_from_str(trusted_args, "//Alice"); + info!("account ss58 is {}", who.public().to_ss58check()); + + println!("send trusted call set-balance({}, {})", who.public(), amount); + + let (mrenclave, shard) = get_identifiers(trusted_args); + let nonce = get_layer_two_nonce!(signer, cli, trusted_args); + let top: TrustedOperation = TrustedCall::balance_set_balance( + signer.public().into(), + who.public().into(), + *amount, + *amount, + ) + .sign(&KeyPair::Sr25519(signer), nonce, &mrenclave, &shard) + .into_trusted_operation(trusted_args.direct); + let _ = perform_operation(cli, trusted_args, &top); +} + +fn balance(cli: &Cli, trusted_args: &TrustedArgs, arg_who: &str) { + debug!("arg_who = {:?}", arg_who); + let who = get_pair_from_str(trusted_args, arg_who); + let top: TrustedOperation = TrustedGetter::free_balance(who.public().into()) + .sign(&KeyPair::Sr25519(who)) + .into(); + let res = perform_operation(cli, trusted_args, &top); 
+ debug!("received result for balance"); + let bal = if let Some(v) = res { + if let Ok(vd) = Balance::decode(&mut v.as_slice()) { + vd + } else { + info!("could not decode value. maybe hasn't been set? {:x?}", v); + 0 + } + } else { + 0 + }; + println!("{}", bal); +} + +fn unshield_funds( + cli: &Cli, + trusted_args: &TrustedArgs, + arg_from: &str, + arg_to: &str, + amount: &Balance, +) { + let from = get_pair_from_str(trusted_args, arg_from); + let to = get_accountid_from_str(arg_to); + println!("from ss58 is {}", from.public().to_ss58check()); + println!("to ss58 is {}", to.to_ss58check()); + + println!("send trusted call unshield_funds from {} to {}: {}", from.public(), to, amount); + + let (mrenclave, shard) = get_identifiers(trusted_args); + let nonce = get_layer_two_nonce!(from, cli, trusted_args); + let top: TrustedOperation = + TrustedCall::balance_unshield(from.public().into(), to, *amount, shard) + .sign(&KeyPair::Sr25519(from), nonce, &mrenclave, &shard) + .into_trusted_operation(trusted_args.direct); + let _ = perform_operation(cli, trusted_args, &top); +} + +fn play_turn(cli: &Cli, trusted_args: &TrustedArgs, arg_player: &str, column: u8) { + let player = get_pair_from_str(trusted_args, arg_player); + println!("player ss58 is {}", player.public().to_ss58check()); + println!("column choice is {:?}", column); + + println!("send trusted call play-turn from {} with column {:?}", player.public(), column); + let (mrenclave, shard) = get_identifiers(trusted_args); + let nonce = get_layer_two_nonce!(player, cli, trusted_args); + + let top: TrustedOperation = TrustedCall::connectfour_play_turn( + sr25519_core::Public::from(player.public()).into(), + column, + ) + .sign(&KeyPair::Sr25519(player), nonce, &mrenclave, &shard) + .into_trusted_operation(trusted_args.direct); + + let _ = perform_operation(cli, trusted_args, &top); +} + +fn get_board(cli: &Cli, trusted_args: &TrustedArgs, arg_player: &str) { + let player = get_pair_from_str(trusted_args, arg_player); + + let key_pair = sr25519_core::Pair::from(player.clone()); + + let top: TrustedOperation = + TrustedGetter::board(sr25519_core::Public::from(player.public()).into()) + .sign(&KeyPair::Sr25519(key_pair)) + .into(); + let res = perform_operation(cli, trusted_args, &top); + debug!("received result for board"); + if let Some(v) = res { + if let Ok(board) = SgxBoardStruct::decode(&mut v.as_slice()) { + println!("Last turn in block number: {}", board.last_turn); + println!("Next player: {}", board.next_player); + println!("Board state: {:?}", board.board_state); + println!("Board:"); + for row in 0..6 { + for column in 0..7 { + print!(" {} ", board.board[column][row]); + } + println!() + } + println!("====================="); + for column in 0..7 { + print!(" {} ", column); + } + println!(); + } else { + println!("could not decode board. maybe hasn't been set? {:x?}", v); + } + } else { + println!("could not fetch board"); + }; +} diff --git a/cli/src/trusted_operation.rs b/cli/src/trusted_operation.rs new file mode 100644 index 0000000000..520b54bc36 --- /dev/null +++ b/cli/src/trusted_operation.rs @@ -0,0 +1,255 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{ + command_utils::{encode_encrypt, get_chain_api, get_pair_from_str, get_worker_api_direct}, + trusted_commands::TrustedArgs, + Cli, +}; +use base58::FromBase58; +use codec::{Decode, Encode}; +use ita_stf::{ShardIdentifier, TrustedCallSigned, TrustedOperation}; +use itc_rpc_client::direct_client::DirectApi; +use itp_node_api_extensions::TEEREX; +use itp_types::{ + DirectRequestStatus, RpcRequest, RpcResponse, RpcReturnValue, TrustedOperationStatus, +}; +use log::*; +use my_node_runtime::{AccountId, Hash}; +use sp_core::{sr25519 as sr25519_core, Pair, H256}; +use std::{result::Result as StdResult, sync::mpsc::channel}; +use substrate_api_client::{compose_extrinsic, XtStatus}; +use teerex_primitives::Request; + +pub fn perform_trusted_operation( + cli: &Cli, + trusted_args: &TrustedArgs, + top: &TrustedOperation, +) -> Option> { + match top { + TrustedOperation::indirect_call(call) => send_request(cli, trusted_args, call.clone()), + TrustedOperation::direct_call(call) => + send_direct_request(cli, trusted_args, TrustedOperation::direct_call(call.clone())), + TrustedOperation::get(getter) => + get_state(cli, trusted_args, TrustedOperation::get(getter.clone())), + } +} + +fn get_state(cli: &Cli, trusted_args: &TrustedArgs, getter: TrustedOperation) -> Option> { + // TODO: ensure getter is signed? + let (_operation_call_encoded, operation_call_encrypted) = match encode_encrypt(cli, getter) { + Ok((encoded, encrypted)) => (encoded, encrypted), + Err(msg) => { + println!("[Error] {}", msg); + return None + }, + }; + let shard = read_shard(trusted_args).unwrap(); + + // compose jsonrpc call + let data = Request { shard, cyphertext: operation_call_encrypted }; + let rpc_method = "author_submitAndWatchExtrinsic".to_owned(); + let jsonrpc_call: String = RpcRequest::compose_jsonrpc_call(rpc_method, data.encode()); + + let direct_api = get_worker_api_direct(cli); + let (sender, receiver) = channel(); + direct_api.watch(jsonrpc_call, sender); + + loop { + match receiver.recv() { + Ok(response) => { + let response: RpcResponse = serde_json::from_str(&response).unwrap(); + if let Ok(return_value) = RpcReturnValue::decode(&mut response.result.as_slice()) { + if return_value.status == DirectRequestStatus::Error { + println!( + "[Error] {}", + String::decode(&mut return_value.value.as_slice()).unwrap() + ); + return None + } + if !return_value.do_watch { + return match Option::decode(&mut return_value.value.as_slice()) { + Ok(value_opt) => value_opt, + Err(_) => panic!("Error when decoding response"), + } + } + }; + }, + Err(_) => return None, + }; + } +} + +fn send_request(cli: &Cli, trusted_args: &TrustedArgs, call: TrustedCallSigned) -> Option> { + let chain_api = get_chain_api(cli); + let (_, call_encrypted) = match encode_encrypt(cli, call) { + Ok((encoded, encrypted)) => (encoded, encrypted), + Err(msg) => { + println!("[Error]: {}", msg); + return None + }, + }; + + let shard = read_shard(trusted_args).unwrap(); + + let arg_signer = &trusted_args.xt_signer; + let signer = get_pair_from_str(arg_signer); + let _chain_api = chain_api.set_signer(sr25519_core::Pair::from(signer)); + + 
let request = Request { shard, cyphertext: call_encrypted };
+	let xt = compose_extrinsic!(_chain_api, TEEREX, "call_worker", request);
+
+	// send and watch extrinsic until block is executed
+	let block_hash =
+		_chain_api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock).unwrap().unwrap();
+	info!(
+		"Trusted call extrinsic sent and successfully included in parentchain block with hash {:?}.",
+		block_hash
+	);
+	info!("Waiting for execution confirmation from enclave...");
+	let (events_in, events_out) = channel();
+	_chain_api.subscribe_events(events_in).unwrap();
+
+	loop {
+		let ret: ProcessedParentchainBlockArgs = _chain_api
+			.wait_for_event::<ProcessedParentchainBlockArgs>(
+				TEEREX,
+				"ProcessedParentchainBlock",
+				None,
+				&events_out,
+			)
+			.unwrap();
+		info!("Confirmation of ProcessedParentchainBlock received");
+		debug!("Expected block Hash: {:?}", block_hash);
+		debug!("Confirmed stf block Hash: {:?}", ret.block_hash);
+		if ret.block_hash == block_hash {
+			return Some(ret.block_hash.encode())
+		}
+	}
+}
+
+fn read_shard(trusted_args: &TrustedArgs) -> StdResult<ShardIdentifier, codec::Error> {
+	match &trusted_args.shard {
+		Some(s) => match s.from_base58() {
+			Ok(s) => ShardIdentifier::decode(&mut &s[..]),
+			_ => panic!("shard argument must be base58 encoded"),
+		},
+		None => match trusted_args.mrenclave.from_base58() {
+			Ok(s) => ShardIdentifier::decode(&mut &s[..]),
+			_ => panic!("mrenclave argument must be base58 encoded"),
+		},
+	}
+}
+
+/// sends an rpc watch request to the worker api server
+fn send_direct_request(
+	cli: &Cli,
+	trusted_args: &TrustedArgs,
+	operation_call: TrustedOperation,
+) -> Option<Vec<u8>> {
+	let (_operation_call_encoded, operation_call_encrypted) =
+		match encode_encrypt(cli, operation_call) {
+			Ok((encoded, encrypted)) => (encoded, encrypted),
+			Err(msg) => {
+				println!("[Error] {}", msg);
+				return None
+			},
+		};
+	let shard = read_shard(trusted_args).unwrap();
+
+	// compose jsonrpc call
+	let data = Request { shard, cyphertext: operation_call_encrypted };
+	let direct_invocation_call = RpcRequest {
+		jsonrpc: "2.0".to_owned(),
+		method: "author_submitAndWatchExtrinsic".to_owned(),
+		params: data.encode(),
+		id: 1,
+	};
+	let jsonrpc_call: String = serde_json::to_string(&direct_invocation_call).unwrap();
+
+	debug!("get direct api");
+	let direct_api = get_worker_api_direct(cli);
+
+	debug!("setup sender and receiver");
+	let (sender, receiver) = channel();
+	direct_api.watch(jsonrpc_call, sender);
+
+	debug!("waiting for rpc response");
+	loop {
+		match receiver.recv() {
+			Ok(response) => {
+				debug!("received response");
+				let response: RpcResponse = serde_json::from_str(&response).unwrap();
+				if let Ok(return_value) = RpcReturnValue::decode(&mut response.result.as_slice()) {
+					debug!("successfully decoded rpc response");
+					match return_value.status {
+						DirectRequestStatus::Error => {
+							debug!("request status is error");
+							if let Ok(value) = String::decode(&mut return_value.value.as_slice()) {
+								println!("[Error] {}", value);
+							}
+							direct_api.close().unwrap();
+							return None
+						},
+						DirectRequestStatus::TrustedOperationStatus(status) => {
+							debug!("request status is: {:?}", status);
+							if let Ok(value) = Hash::decode(&mut return_value.value.as_slice()) {
+								println!("Trusted call {:?} is {:?}", value, status);
+							}
+							if connection_can_be_closed(status) {
+								direct_api.close().unwrap();
+							}
+						},
+						_ => {
+							debug!("request status is ignored");
+							direct_api.close().unwrap();
+							return None
+						},
+					}
+					if !return_value.do_watch {
+						debug!("do watch is false, closing connection");
+						direct_api.close().unwrap();
+						return None
+ } + }; + }, + Err(e) => { + error!("failed to receive rpc response: {:?}", e); + direct_api.close().unwrap(); + return None + }, + }; + } +} + +fn connection_can_be_closed(top_status: TrustedOperationStatus) -> bool { + !matches!( + top_status, + TrustedOperationStatus::Submitted + | TrustedOperationStatus::Future + | TrustedOperationStatus::Ready + | TrustedOperationStatus::Broadcast + ) +} + +#[allow(dead_code)] +#[derive(Decode)] +struct ProcessedParentchainBlockArgs { + signer: AccountId, + block_hash: H256, + merkle_root: H256, +} diff --git a/core-primitives/block-import-queue/Cargo.toml b/core-primitives/block-import-queue/Cargo.toml index 3f056c4a6c..6f8e5e5c25 100644 --- a/core-primitives/block-import-queue/Cargo.toml +++ b/core-primitives/block-import-queue/Cargo.toml @@ -18,7 +18,7 @@ thiserror = { version = "1.0", optional = true } # crates.io no-std compatible libraries log = { version = "0.4", default-features = false } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } [features] default = ["std"] diff --git a/core-primitives/component-container/Cargo.toml b/core-primitives/component-container/Cargo.toml index c266e8d9e8..5e02ef751c 100644 --- a/core-primitives/component-container/Cargo.toml +++ b/core-primitives/component-container/Cargo.toml @@ -9,10 +9,19 @@ resolver = "2" # sgx dependencies sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } +# sgx enabled external libraries +thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linux/thiserror-sgx", tag = "sgx_1.1.3", optional = true } + +# std compatible external libraries (make sure these versions match with the sgx-enabled ones above) +thiserror = { version = "1.0", optional = true } + [features] default = ["std"] -std = [] +std = [ + "thiserror", +] sgx = [ # sgx "sgx_tstd", + "thiserror_sgx", ] diff --git a/core-primitives/component-container/src/component_container.rs b/core-primitives/component-container/src/component_container.rs index b6f4e090f1..c886b10784 100644 --- a/core-primitives/component-container/src/component_container.rs +++ b/core-primitives/component-container/src/component_container.rs @@ -23,8 +23,16 @@ use std::sync::SgxMutex as Mutex; #[cfg(feature = "std")] use std::sync::Mutex; -use crate::atomic_container::AtomicContainer; -use std::{marker::PhantomData, sync::Arc}; +use crate::{ + atomic_container::AtomicContainer, + error::{Error, Result}, +}; +use std::{ + format, + marker::PhantomData, + string::{String, ToString}, + sync::Arc, +}; /// Trait to initialize a generic component. pub trait ComponentInitializer { @@ -38,7 +46,7 @@ pub trait ComponentGetter { type ComponentType; /// Try to get a specific component, returns `None` if component has not been initialized. - fn get(&self) -> Option>; + fn get(&self) -> Result>; } /// Workaround to make `new()` a `const fn`. @@ -48,6 +56,7 @@ struct Invariant(T); /// Component container implementation. Can be used in a global static context. pub struct ComponentContainer { container: AtomicContainer, + component_name: &'static str, _phantom: PhantomData>, } @@ -55,8 +64,12 @@ impl ComponentContainer { /// Create a new container instance. /// /// Has to be `const` in order to be used in a `static` context. 
- pub const fn new() -> Self { - ComponentContainer { container: AtomicContainer::new(), _phantom: PhantomData } + pub const fn new(component_name: &'static str) -> Self { + ComponentContainer { + container: AtomicContainer::new(), + component_name, + _phantom: PhantomData, + } } } @@ -68,11 +81,20 @@ impl ComponentInitializer for ComponentContainer { } } +impl ToString for ComponentContainer { + fn to_string(&self) -> String { + format!("{} component", self.component_name) + } +} + impl ComponentGetter for ComponentContainer { type ComponentType = Component; - fn get(&self) -> Option> { - let component_mutex: &Mutex> = self.container.load()?; - Some(component_mutex.lock().unwrap().clone()) + fn get(&self) -> Result> { + let component_mutex: &Mutex> = self + .container + .load() + .ok_or_else(|| Error::ComponentNotInitialized(self.to_string()))?; + Ok(component_mutex.lock().unwrap().clone()) } } diff --git a/sidechain/top-pool-rpc-author/src/pool_types.rs b/core-primitives/component-container/src/error.rs similarity index 51% rename from sidechain/top-pool-rpc-author/src/pool_types.rs rename to core-primitives/component-container/src/error.rs index 693340d123..9ca0ac0b20 100644 --- a/sidechain/top-pool-rpc-author/src/pool_types.rs +++ b/core-primitives/component-container/src/error.rs @@ -15,19 +15,18 @@ */ -use crate::api::SidechainApi; -use itc_direct_rpc_server::{ - rpc_connection_registry::ConnectionRegistry, rpc_responder::RpcResponder, -}; -use itc_tls_websocket_server::connection::TungsteniteWsConnection; -use itp_types::Block; -use its_top_pool::basic_pool::BasicPool; +#[cfg(all(not(feature = "std"), feature = "sgx"))] +use crate::sgx_reexport_prelude::*; -type Hash = sp_core::H256; +use std::{boxed::Box, string::String}; -pub(crate) type EnclaveRpcConnectionRegistry = ConnectionRegistry; +pub type Result = core::result::Result; -pub type EnclaveRpcResponder = - RpcResponder; - -pub type BPool = BasicPool, Block, EnclaveRpcResponder>; +/// extrinsics factory error +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("Component is not initialized: {0}")] + ComponentNotInitialized(String), + #[error(transparent)] + Other(#[from] Box), +} diff --git a/core-primitives/component-container/src/lib.rs b/core-primitives/component-container/src/lib.rs index 83cdc34457..9c684e4361 100644 --- a/core-primitives/component-container/src/lib.rs +++ b/core-primitives/component-container/src/lib.rs @@ -23,7 +23,14 @@ compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the sam #[cfg(all(not(feature = "std"), feature = "sgx"))] extern crate sgx_tstd as std; +// re-export module to properly feature gate sgx and regular std environment +#[cfg(all(not(feature = "std"), feature = "sgx"))] +pub mod sgx_reexport_prelude { + pub use thiserror_sgx as thiserror; +} + mod atomic_container; pub mod component_container; +pub mod error; pub use component_container::*; diff --git a/core-primitives/enclave-api/Cargo.toml b/core-primitives/enclave-api/Cargo.toml index 0363c046e3..ca4702bdf4 100644 --- a/core-primitives/enclave-api/Cargo.toml +++ b/core-primitives/enclave-api/Cargo.toml @@ -8,16 +8,16 @@ edition = "2018" thiserror = "1.0.25" log = "0.4" serde_json = "1.0" -codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", features = ["derive"] } sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } sgx_urts = { branch = "master", git = 
"https://github.com/apache/teaclave-sgx-sdk.git" } sgx_crypto_helper = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } -frame-support = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-finality-grandpa = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } +frame-support = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-finality-grandpa = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } itp-enclave-api-ffi = { path = "ffi" } itp-settings = { path = "../settings" } diff --git a/core-primitives/enclave-api/ffi/src/lib.rs b/core-primitives/enclave-api/ffi/src/lib.rs index cb034d1fa4..d569d07d1a 100644 --- a/core-primitives/enclave-api/ffi/src/lib.rs +++ b/core-primitives/enclave-api/ffi/src/lib.rs @@ -24,6 +24,11 @@ extern "C" { value_size: u32, ) -> sgx_status_t; + pub fn init_enclave_sidechain_components( + eid: sgx_enclave_id_t, + retval: *mut sgx_status_t, + ) -> sgx_status_t; + pub fn init_direct_invocation_server( eid: sgx_enclave_id_t, retval: *mut sgx_status_t, @@ -44,6 +49,13 @@ extern "C" { latest_header_size: usize, ) -> sgx_status_t; + pub fn init_shard( + eid: sgx_enclave_id_t, + retval: *mut sgx_status_t, + shard: *const u8, + shard_size: u32, + ) -> sgx_status_t; + pub fn trigger_parentchain_block_import( eid: sgx_enclave_id_t, retval: *mut sgx_status_t, diff --git a/core-primitives/enclave-api/src/enclave_base.rs b/core-primitives/enclave-api/src/enclave_base.rs index 7ec6ecb9a4..d2b688369e 100644 --- a/core-primitives/enclave-api/src/enclave_base.rs +++ b/core-primitives/enclave-api/src/enclave_base.rs @@ -35,6 +35,9 @@ pub trait EnclaveBase: Send + Sync + 'static { /// Initialize the enclave (needs to be called once at application startup). fn init(&self, mu_ra_addr: &str, untrusted_worker_addr: &str) -> EnclaveResult<()>; + /// Initialize the enclave sidechain components. + fn init_enclave_sidechain_components(&self) -> EnclaveResult<()>; + /// Initialize the direct invocation RPC server. fn init_direct_invocation_server(&self, rpc_server_addr: String) -> EnclaveResult<()>; @@ -46,6 +49,9 @@ pub trait EnclaveBase: Send + Sync + 'static { authority_proof: Vec>, ) -> EnclaveResult; + /// Initialize a new shard. + fn init_shard(&self, shard: Vec) -> EnclaveResult<()>; + /// Trigger the import of parentchain block explicitly. Used when initializing a light-client /// with a triggered import dispatcher. 
fn trigger_parentchain_block_import(&self) -> EnclaveResult<()>; @@ -86,6 +92,17 @@ impl EnclaveBase for Enclave { Ok(()) } + fn init_enclave_sidechain_components(&self) -> EnclaveResult<()> { + let mut retval = sgx_status_t::SGX_SUCCESS; + + let result = unsafe { ffi::init_enclave_sidechain_components(self.eid, &mut retval) }; + + ensure!(result == sgx_status_t::SGX_SUCCESS, Error::Sgx(result)); + ensure!(retval == sgx_status_t::SGX_SUCCESS, Error::Sgx(retval)); + + Ok(()) + } + fn init_direct_invocation_server(&self, rpc_server_addr: String) -> EnclaveResult<()> { let mut retval = sgx_status_t::SGX_SUCCESS; @@ -131,6 +148,18 @@ impl EnclaveBase for Enclave { Ok(latest) } + fn init_shard(&self, shard: Vec) -> EnclaveResult<()> { + let mut retval = sgx_status_t::SGX_SUCCESS; + + let result = + unsafe { ffi::init_shard(self.eid, &mut retval, shard.as_ptr(), shard.len() as u32) }; + + ensure!(result == sgx_status_t::SGX_SUCCESS, Error::Sgx(result)); + ensure!(retval == sgx_status_t::SGX_SUCCESS, Error::Sgx(retval)); + + Ok(()) + } + fn trigger_parentchain_block_import(&self) -> EnclaveResult<()> { let mut retval = sgx_status_t::SGX_SUCCESS; diff --git a/core-primitives/enclave-metrics/Cargo.toml b/core-primitives/enclave-metrics/Cargo.toml index fd28fd38ea..4d7882c22d 100644 --- a/core-primitives/enclave-metrics/Cargo.toml +++ b/core-primitives/enclave-metrics/Cargo.toml @@ -12,7 +12,7 @@ sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-s sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } # no-std dependencies -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } [features] default = ["std"] diff --git a/core-primitives/extrinsics-factory/Cargo.toml b/core-primitives/extrinsics-factory/Cargo.toml index 474f5d5a26..6eeda00216 100644 --- a/core-primitives/extrinsics-factory/Cargo.toml +++ b/core-primitives/extrinsics-factory/Cargo.toml @@ -27,7 +27,7 @@ mocks = [] # sgx dependencies sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } -substrate-api-client = { default-features = false, git = "https://github.com/scs/substrate-api-client", branch = "master" } +substrate-api-client = { default-features = false, git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } # local dependencies itp-nonce-cache = { path = "../nonce-cache", default-features = false } @@ -42,6 +42,6 @@ thiserror = { version = "1.0", optional = true } # no-std dependencies log = { version = "0.4", default-features = false } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime 
= { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} diff --git a/core-primitives/node-api-extensions/Cargo.toml b/core-primitives/node-api-extensions/Cargo.toml index 33a24b4204..ef40937a3d 100644 --- a/core-primitives/node-api-extensions/Cargo.toml +++ b/core-primitives/node-api-extensions/Cargo.toml @@ -6,16 +6,16 @@ edition = "2018" [dependencies] # crates.io -codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", features = ["derive"] } thiserror = "1.0" # substrate -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-finality-grandpa = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-finality-grandpa = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } # scs -substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "master" } +substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } # integritee itp-types = { path = "../types" } diff --git a/core-primitives/ocall-api/Cargo.toml b/core-primitives/ocall-api/Cargo.toml index 5fbc1de861..82cc665ad8 100644 --- a/core-primitives/ocall-api/Cargo.toml +++ b/core-primitives/ocall-api/Cargo.toml @@ -6,23 +6,28 @@ edition = "2018" resolver = "2" [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } +derive_more = { version = "0.99.5" } -# substrate deps -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} - -# sgx-deps +# sgx deps sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } +# substrate deps +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} + # local deps +itp-storage = { path = "../storage", default-features = false } itp-types = { path = "../types", default-features = false } [features] default = ["std"] std = [ "codec/std", + "sp-core/std", "sp-runtime/std", "sp-std/std", + "itp-storage/std", "itp-types/std", ] diff --git a/core-primitives/ocall-api/src/lib.rs b/core-primitives/ocall-api/src/lib.rs index feb8d9fe72..2ccedc3c34 100644 --- a/core-primitives/ocall-api/src/lib.rs +++ b/core-primitives/ocall-api/src/lib.rs @@ -21,12 +21,25 @@ pub extern crate alloc; use alloc::vec::Vec; use 
codec::{Decode, Encode}; +use core::result::Result as StdResult; +use derive_more::{Display, From}; +use itp_storage::{Error as StorageError, StorageEntryVerified}; use itp_types::{ BlockHash, ShardIdentifier, TrustedOperationStatus, WorkerRequest, WorkerResponse, }; use sgx_types::*; -use sp_runtime::OpaqueExtrinsic; +use sp_core::H256; +use sp_runtime::{traits::Header, OpaqueExtrinsic}; +use sp_std::prelude::*; + +#[derive(Debug, Display, From)] +pub enum Error { + Storage(StorageError), + Codec(codec::Error), + Sgx(sgx_types::sgx_status_t), +} +pub type Result = StdResult; /// Trait for the enclave to make o-calls related to remote attestation pub trait EnclaveAttestationOCallApi: Clone + Send + Sync { fn sgx_init_quote(&self) -> SgxResult<(sgx_target_info_t, sgx_epid_group_id_t)>; @@ -70,6 +83,18 @@ pub trait EnclaveOnChainOCallApi: Clone + Send + Sync { &self, req: Vec, ) -> SgxResult>>; + + fn get_storage_verified, V: Decode>( + &self, + storage_hash: Vec, + header: &H, + ) -> Result>; + + fn get_multiple_storages_verified, V: Decode>( + &self, + storage_hashes: Vec>, + header: &H, + ) -> Result>>; } /// Trait for sending metric updates. diff --git a/core-primitives/primitives-cache/src/lib.rs b/core-primitives/primitives-cache/src/lib.rs index 781a082b6e..e4a2724e3f 100644 --- a/core-primitives/primitives-cache/src/lib.rs +++ b/core-primitives/primitives-cache/src/lib.rs @@ -18,7 +18,7 @@ //! Stores all primitives of the enclave that do need to be accessed often, but are //! not be frequently mutated, such as keys and server urls. //! -//! TODO: For now only the mu-ra server and untrusted wokrer url is stored here. Keys and such could also be stored here. +//! TODO: For now only the mu-ra server and untrusted worker url is stored here. Keys and such could also be stored here. #![cfg_attr(not(feature = "std"), no_std)] #![feature(assert_matches)] @@ -47,15 +47,12 @@ use std::sync::SgxRwLockWriteGuard as RwLockWriteGuard; use crate::error::Result; use lazy_static::lazy_static; -use std::{ - string::{String, ToString}, - sync::Arc, -}; +use std::{string::String, sync::Arc}; pub use primitives_cache::PrimitivesCache; lazy_static! { - /// Global instance of the primitves cache. + /// Global instance of the primitives cache. /// /// Concurrent access is managed internally, using RW locks. pub static ref GLOBAL_PRIMITIVES_CACHE: Arc = Default::default(); @@ -71,11 +68,8 @@ pub struct Primitives { } impl Primitives { - pub fn new(mu_ra_url: &str, untrusted_worker_url: &str) -> Primitives { - Primitives { - mu_ra_url: mu_ra_url.to_string(), - untrusted_worker_url: untrusted_worker_url.to_string(), - } + pub fn new(mu_ra_url: String, untrusted_worker_url: String) -> Primitives { + Primitives { mu_ra_url, untrusted_worker_url } } pub fn mu_ra_url(&self) -> &str { @@ -108,8 +102,8 @@ pub trait GetPrimitives { // Helper function to set primitives of a given cache. 
pub fn set_primitives( cache: &E, - mu_ra_url: &str, - untrusted_worker_url: &str, + mu_ra_url: String, + untrusted_worker_url: String, ) -> Result<()> { let primitives = Primitives::new(mu_ra_url, untrusted_worker_url); let mut rw_lock = cache.load_for_mutation()?; diff --git a/core-primitives/primitives-cache/src/primitives_cache.rs b/core-primitives/primitives-cache/src/primitives_cache.rs index 569cc22d4b..40bc516f51 100644 --- a/core-primitives/primitives-cache/src/primitives_cache.rs +++ b/core-primitives/primitives-cache/src/primitives_cache.rs @@ -81,8 +81,8 @@ pub mod tests { pub fn set_primitives_works() { let cache = PrimitivesCache::default(); let mut lock = cache.load_for_mutation().unwrap(); - let mu_ra_url = "hello"; - let untrusted_url = "world"; + let mu_ra_url = "hello".to_string(); + let untrusted_url = "world".to_string(); let primitives = Primitives::new(mu_ra_url, untrusted_url); *lock = primitives.clone(); std::mem::drop(lock); @@ -92,8 +92,8 @@ pub mod tests { #[test] pub fn concurrent_read_access_blocks_until_write_is_done() { let cache = Arc::new(PrimitivesCache::default()); - let mu_ra_url = "hello"; - let untrusted_url = "world"; + let mu_ra_url = "hello".to_string(); + let untrusted_url = "world".to_string(); let primitives = Primitives::new(mu_ra_url, untrusted_url); let mut write_lock = cache.load_for_mutation().unwrap(); diff --git a/core-primitives/registry-storage/Cargo.toml b/core-primitives/registry-storage/Cargo.toml index be6cde1505..a7de4cd4c9 100644 --- a/core-primitives/registry-storage/Cargo.toml +++ b/core-primitives/registry-storage/Cargo.toml @@ -5,15 +5,15 @@ authors = ["Integritee AG "] edition = "2018" [dependencies] -codec = { version = "2.0.0", default-features = false, features = ["derive"], package = "parity-scale-codec" } -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +codec = { version = "3.0.0", default-features = false, features = ["derive"], package = "parity-scale-codec" } +sp-std = { default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} #local deps itp-storage = { path = "../storage", default-features = false } itp-types = { path = "../types", default-features = false } # node deps -pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } +pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } [features] @@ -22,4 +22,4 @@ std = [ "sp-std/std", "itp-storage/std", "pallet-ajuna-gameregistry/std", -] \ No newline at end of file +] diff --git a/core-primitives/settings/src/lib.rs b/core-primitives/settings/src/lib.rs index 80b0868c02..f168d13599 100644 --- a/core-primitives/settings/src/lib.rs +++ b/core-primitives/settings/src/lib.rs @@ -53,6 +53,7 @@ pub mod files { pub static RA_API_KEY_FILE: &str = "key.txt"; pub const SPID_MIN_LENGTH: usize = 32; + pub const STATE_SNAPSHOTS_CACHE_SIZE: usize = 120; } /// Settings concerning the worker @@ -98,19 +99,20 @@ pub mod enclave { pub mod node { // you may have to update these indices upon new builds of the runtime // you can get the index from metadata, counting modules starting with zero - pub static TEEREX_MODULE: u8 = 9u8; + pub static TEEREX_MODULE: u8 = 19u8; pub static REGISTER_ENCLAVE: u8 = 0u8; //pub static UNREGISTER_ENCLAVE: u8 = 1u8; pub static CALL_WORKER: u8 = 2u8; 
pub static PROCESSED_PARENTCHAIN_BLOCK: u8 = 3u8; pub static PROPOSED_SIDECHAIN_BLOCK: u8 = 4u8; pub static SHIELD_FUNDS: u8 = 5u8; + pub static UNSHIELD: u8 = 6u8; - pub static GAME_REGISTRY_MODULE: u8 = 13u8; + pub static GAME_REGISTRY_MODULE: u8 = 17u8; pub static ACK_GAME: u8 = 2u8; pub static FINISH_GAME: u8 = 4u8; + // bump this to be consistent with integritee-node runtime pub static RUNTIME_SPEC_VERSION: u32 = 100; pub static RUNTIME_TRANSACTION_VERSION: u32 = 1; - pub static UNSHIELD: u8 = 6u8; } diff --git a/core-primitives/sgx/crypto/Cargo.toml b/core-primitives/sgx/crypto/Cargo.toml index 7c26cbf606..57ab5d2c15 100644 --- a/core-primitives/sgx/crypto/Cargo.toml +++ b/core-primitives/sgx/crypto/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" [dependencies] aes = { version = "0.6.0" } ofb = { version = "0.4.0" } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } derive_more = { version = "0.99.5" } log = { version = "0.4.14", default-features = false } serde = { version = "1.0", default-features = false, features = ["alloc"], optional = true } @@ -23,7 +23,7 @@ serde-sgx = { package = "serde", tag = "sgx_1.1.3", git = "https://github.com/me serde_json-sgx = { package = "serde_json", tag = "sgx_1.1.3", git = "https://github.com/mesalock-linux/serde-json-sgx", optional = true } # substrate deps -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # local deps itp-settings = { path = "../../settings" } diff --git a/core-primitives/sgx/crypto/src/aes.rs b/core-primitives/sgx/crypto/src/aes.rs index 861146d0e4..e9d3ebef0a 100644 --- a/core-primitives/sgx/crypto/src/aes.rs +++ b/core-primitives/sgx/crypto/src/aes.rs @@ -30,7 +30,7 @@ use std::convert::{TryFrom, TryInto}; type AesOfb = Ofb; -#[derive(Debug, Default, Encode, Decode, Clone, Copy)] +#[derive(Debug, Default, Encode, Decode, Clone, Copy, PartialEq, Eq)] pub struct Aes { pub key: [u8; 16], pub init_vec: [u8; 16], @@ -78,24 +78,37 @@ pub mod sgx { use super::*; use itp_settings::files::AES_KEY_FILE_AND_INIT_V; - use itp_sgx_io::{seal, unseal, SealedIO}; + use itp_sgx_io::{seal, unseal, SealedIO, StaticSealedIO}; use log::info; use sgx_rand::{Rng, StdRng}; use std::sgxfs::SgxFile; - impl SealedIO for AesSeal { + impl StaticSealedIO for AesSeal { type Error = Error; type Unsealed = Aes; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { Ok(unseal(AES_KEY_FILE_AND_INIT_V).map(|b| Decode::decode(&mut b.as_slice()))??) } - fn seal(unsealed: Self::Unsealed) -> Result<()> { + fn seal_to_static_file(unsealed: Self::Unsealed) -> Result<()> { Ok(unsealed.using_encoded(|bytes| seal(bytes, AES_KEY_FILE_AND_INIT_V))?) } } + impl SealedIO for AesSeal { + type Error = Error; + type Unsealed = Aes; + + fn unseal(&self) -> Result { + Self::unseal_from_static_file() + } + + fn seal(&self, unsealed: Self::Unsealed) -> Result<()> { + Self::seal_to_static_file(unsealed) + } + } + pub fn create_sealed_if_absent() -> Result<()> { if SgxFile::open(AES_KEY_FILE_AND_INIT_V).is_err() { info!("[Enclave] Keyfile not found, creating new! 
{}", AES_KEY_FILE_AND_INIT_V); @@ -112,6 +125,6 @@ pub mod sgx { rand.fill_bytes(&mut key); rand.fill_bytes(&mut iv); - AesSeal::seal(Aes::new(key, iv)) + AesSeal::seal_to_static_file(Aes::new(key, iv)) } } diff --git a/core-primitives/sgx/crypto/src/ed25519.rs b/core-primitives/sgx/crypto/src/ed25519.rs index b179e10c8f..c9752cadb5 100644 --- a/core-primitives/sgx/crypto/src/ed25519.rs +++ b/core-primitives/sgx/crypto/src/ed25519.rs @@ -30,17 +30,17 @@ pub mod sgx { use crate::error::{Error, Result}; use codec::Encode; use itp_settings::files::SEALED_SIGNER_SEED_FILE; - use itp_sgx_io::{seal, unseal, SealedIO}; + use itp_sgx_io::{seal, unseal, SealedIO, StaticSealedIO}; use log::*; use sgx_rand::{Rng, StdRng}; use sp_core::{crypto::Pair, ed25519}; use std::{path::Path, sgxfs::SgxFile}; - impl SealedIO for Ed25519Seal { + impl StaticSealedIO for Ed25519Seal { type Error = Error; type Unsealed = ed25519::Pair; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { let raw = unseal(SEALED_SIGNER_SEED_FILE)?; let key = ed25519::Pair::from_seed_slice(&raw) @@ -49,11 +49,24 @@ pub mod sgx { Ok(key.into()) } - fn seal(unsealed: Self::Unsealed) -> Result<()> { + fn seal_to_static_file(unsealed: Self::Unsealed) -> Result<()> { Ok(unsealed.seed().using_encoded(|bytes| seal(bytes, SEALED_SIGNER_SEED_FILE))?) } } + impl SealedIO for Ed25519Seal { + type Error = Error; + type Unsealed = ed25519::Pair; + + fn unseal(&self) -> Result { + Self::unseal_from_static_file() + } + + fn seal(&self, unsealed: Self::Unsealed) -> Result<()> { + Self::seal_to_static_file(unsealed) + } + } + pub fn create_sealed_if_absent() -> Result<()> { if SgxFile::open(SEALED_SIGNER_SEED_FILE).is_err() { if Path::new(SEALED_SIGNER_SEED_FILE).exists() { diff --git a/core-primitives/sgx/crypto/src/lib.rs b/core-primitives/sgx/crypto/src/lib.rs index 68c7579b3d..58721611fa 100644 --- a/core-primitives/sgx/crypto/src/lib.rs +++ b/core-primitives/sgx/crypto/src/lib.rs @@ -33,5 +33,5 @@ pub use self::rsa3072::*; pub use error::*; pub use traits::*; -#[cfg(all(feature = "mocks", feature = "sgx"))] +#[cfg(feature = "mocks")] pub mod mocks; diff --git a/core-primitives/sgx/crypto/src/mocks.rs b/core-primitives/sgx/crypto/src/mocks.rs index 539b0a234b..fdb4864989 100644 --- a/core-primitives/sgx/crypto/src/mocks.rs +++ b/core-primitives/sgx/crypto/src/mocks.rs @@ -15,41 +15,72 @@ */ +#[cfg(feature = "sgx")] +use std::sync::SgxRwLock as RwLock; + +#[cfg(feature = "std")] +use std::sync::RwLock; + use crate::{ + aes::Aes, error::{Error, Result}, - Aes, }; -use itp_sgx_io::SealedIO; -use sgx_crypto_helper::rsa3072::Rsa3072KeyPair; +use itp_sgx_io::{SealedIO, StaticSealedIO}; #[derive(Default)] -pub struct AesSealMock {} +pub struct AesSealMock { + aes: RwLock, +} -impl SealedIO for AesSealMock { +impl StaticSealedIO for AesSealMock { type Error = Error; type Unsealed = Aes; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { Ok(Aes::default()) } - fn seal(_unsealed: Self::Unsealed) -> Result<()> { + fn seal_to_static_file(_unsealed: Self::Unsealed) -> Result<()> { Ok(()) } } -#[derive(Default)] -pub struct Rsa3072SealMock {} - -impl SealedIO for Rsa3072SealMock { +impl SealedIO for AesSealMock { type Error = Error; - type Unsealed = Rsa3072KeyPair; + type Unsealed = Aes; - fn unseal() -> Result { - Ok(Rsa3072KeyPair::default()) + fn unseal(&self) -> std::result::Result { + self.aes + .read() + .map_err(|e| Error::Other(format!("{:?}", e).into())) + .map(|k| k.clone()) } - fn seal(_unsealed: Self::Unsealed) -> 
Result<()> { + fn seal(&self, unsealed: Self::Unsealed) -> std::result::Result<(), Self::Error> { + let mut aes_lock = self.aes.write().map_err(|e| Error::Other(format!("{:?}", e).into()))?; + *aes_lock = unsealed; Ok(()) } } + +#[cfg(feature = "sgx")] +pub mod sgx { + use super::*; + use sgx_crypto_helper::rsa3072::Rsa3072KeyPair; + + #[derive(Default)] + pub struct Rsa3072SealMock {} + + impl StaticSealedIO for Rsa3072SealMock { + type Error = Error; + type Unsealed = Rsa3072KeyPair; + + fn unseal_from_static_file() -> Result { + Ok(Rsa3072KeyPair::default()) + } + + fn seal_to_static_file(_unsealed: Self::Unsealed) -> Result<()> { + Ok(()) + } + } +} diff --git a/core-primitives/sgx/crypto/src/rsa3072.rs b/core-primitives/sgx/crypto/src/rsa3072.rs index 05a71ac198..2db909d436 100644 --- a/core-primitives/sgx/crypto/src/rsa3072.rs +++ b/core-primitives/sgx/crypto/src/rsa3072.rs @@ -23,7 +23,7 @@ use crate::{ }; use derive_more::Display; use itp_settings::files::RSA3072_SEALED_KEY_FILE; -use itp_sgx_io::{seal, unseal, SealedIO}; +use itp_sgx_io::{seal, unseal, SealedIO, StaticSealedIO}; use log::*; use sgx_crypto_helper::{ rsa3072::{Rsa3072KeyPair, Rsa3072PubKey}, @@ -34,23 +34,36 @@ use std::{sgxfs::SgxFile, vec::Vec}; #[derive(Copy, Clone, Debug, Display)] pub struct Rsa3072Seal; -impl SealedIO for Rsa3072Seal { +impl StaticSealedIO for Rsa3072Seal { type Error = Error; type Unsealed = Rsa3072KeyPair; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { let raw = unseal(RSA3072_SEALED_KEY_FILE)?; let key: Rsa3072KeyPair = serde_json::from_slice(&raw).map_err(|e| Error::Other(format!("{:?}", e).into()))?; Ok(key.into()) } - fn seal(unsealed: Rsa3072KeyPair) -> Result<()> { + fn seal_to_static_file(unsealed: Rsa3072KeyPair) -> Result<()> { let key_json = serde_json::to_vec(&unsealed).map_err(|e| Error::Other(format!("{:?}", e).into()))?; Ok(seal(&key_json, RSA3072_SEALED_KEY_FILE)?) } } +impl SealedIO for Rsa3072Seal { + type Error = Error; + type Unsealed = Rsa3072KeyPair; + + fn unseal(&self) -> Result { + Self::unseal_from_static_file() + } + + fn seal(&self, unsealed: Self::Unsealed) -> Result<()> { + Self::seal_to_static_file(unsealed) + } +} + impl ShieldingCrypto for Rsa3072KeyPair { type Error = Error; @@ -71,7 +84,7 @@ impl ShieldingCrypto for Rsa3072KeyPair { impl Rsa3072Seal { pub fn unseal_pubkey() -> Result { - let pair = Self::unseal()?; + let pair = Self::unseal_from_static_file()?; let pubkey = pair.export_pubkey().map_err(|e| Error::Other(format!("{:?}", e).into()))?; Ok(pubkey) } @@ -88,5 +101,5 @@ pub fn create_sealed_if_absent() -> Result<()> { pub fn create_sealed() -> Result<()> { let rsa_keypair = Rsa3072KeyPair::new().map_err(|e| Error::Other(format!("{:?}", e).into()))?; // println!("[Enclave] generated RSA3072 key pair. Cleartext: {}", rsa_key_json); - Rsa3072Seal::seal(rsa_keypair) + Rsa3072Seal::seal_to_static_file(rsa_keypair) } diff --git a/core-primitives/sgx/io/src/lib.rs b/core-primitives/sgx/io/src/lib.rs index 86463e5eb6..278ab31cd7 100644 --- a/core-primitives/sgx/io/src/lib.rs +++ b/core-primitives/sgx/io/src/lib.rs @@ -9,8 +9,10 @@ compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the sam extern crate sgx_tstd as std; use std::{ + convert::AsRef, fs, io::{Read, Result as IOResult, Write}, + path::Path, string::String, vec::Vec, }; @@ -28,27 +30,43 @@ pub trait IO: Sized { /// Abstraction around IO that is supposed to use `SgxFile`. 
We expose it also in `std` to /// be able to put it as trait bounds in `std` and use it in tests. +/// +/// This is the static method (or associated function) version, should be made obsolete over time, +/// since it has state, but hides it in a global state. Makes it difficult to mock. +pub trait StaticSealedIO: Sized { + type Error: From + std::fmt::Debug + 'static; + + /// Type that is unsealed. + type Unsealed; + + fn unseal_from_static_file() -> Result; + fn seal_to_static_file(unsealed: Self::Unsealed) -> Result<(), Self::Error>; +} + +/// Abstraction around IO that is supposed to use `SgxFile`. We expose it also in `std` to +/// be able to put it as trait bounds in `std` and use it in tests. +/// pub trait SealedIO: Sized { type Error: From + std::fmt::Debug + 'static; /// Type that is unsealed. type Unsealed; - fn unseal() -> Result; - fn seal(unsealed: Self::Unsealed) -> Result<(), Self::Error>; + fn unseal(&self) -> Result; + fn seal(&self, unsealed: Self::Unsealed) -> Result<(), Self::Error>; } -pub fn read(path: &str) -> IOResult> { +pub fn read>(path: P) -> IOResult> { let mut buf = Vec::new(); fs::File::open(path).map(|mut f| f.read_to_end(&mut buf))??; Ok(buf) } -pub fn write(bytes: &[u8], path: &str) -> IOResult<()> { +pub fn write>(bytes: &[u8], path: P) -> IOResult<()> { fs::File::create(path).map(|mut f| f.write_all(bytes))? } -pub fn read_to_string(filepath: &str) -> IOResult { +pub fn read_to_string>(filepath: P) -> IOResult { let mut contents = String::new(); fs::File::open(filepath).map(|mut f| f.read_to_string(&mut contents))??; Ok(contents) @@ -57,18 +75,20 @@ pub fn read_to_string(filepath: &str) -> IOResult { #[cfg(feature = "sgx")] mod sgx { use std::{ + convert::AsRef, io::{Read, Result, Write}, + path::Path, sgxfs::SgxFile, vec::Vec, }; - pub fn unseal(path: &str) -> Result> { + pub fn unseal>(path: P) -> Result> { let mut buf = Vec::new(); SgxFile::open(path).map(|mut f| f.read_to_end(&mut buf))??; Ok(buf) } - pub fn seal(bytes: &[u8], path: &str) -> Result<()> { + pub fn seal>(bytes: &[u8], path: P) -> Result<()> { SgxFile::create(path).map(|mut f| f.write_all(bytes))? 
} } diff --git a/core-primitives/stf-executor/Cargo.toml b/core-primitives/stf-executor/Cargo.toml index b558ac5344..14b99357b5 100644 --- a/core-primitives/stf-executor/Cargo.toml +++ b/core-primitives/stf-executor/Cargo.toml @@ -19,7 +19,6 @@ itp-ocall-api = { path = "../ocall-api", default-features = false } itp-registry-storage = { path = "../registry-storage", default-features = false } itp-stf-state-handler = { path = "../stf-state-handler", default-features = false } itp-storage = { path = "../storage", default-features = false } -itp-storage-verifier = { path = "../storage-verified", default-features = false } itp-time-utils = { path = "../time-utils", default-features = false } itp-types = { path = "../types", default-features = false } @@ -31,14 +30,14 @@ thiserror = { version = "1.0", optional = true } # no-std dependencies log = { version = "0.4", default-features = false } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } # substrate dependencies -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # node deps -pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } +pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } # dev dependencies itp-test = { path = "../test", default-features = false, optional = true } @@ -53,7 +52,6 @@ std = [ "itp-ocall-api/std", "itp-stf-state-handler/std", "itp-storage/std", - "itp-storage-verifier/std", "itp-types/std", "itp-time-utils/std", # crates.io @@ -69,7 +67,6 @@ sgx = [ "ita-stf/sgx", "itp-stf-state-handler/sgx", "itp-storage/sgx", - "itp-storage-verifier/sgx", "itp-time-utils/sgx", "sgx-externalities/sgx", "thiserror_sgx", diff --git a/core-primitives/stf-executor/src/error.rs b/core-primitives/stf-executor/src/error.rs index b6933addd7..f68d1a1571 100644 --- a/core-primitives/stf-executor/src/error.rs +++ b/core-primitives/stf-executor/src/error.rs @@ -34,8 +34,8 @@ pub enum Error { StateHandler(#[from] itp_stf_state_handler::error::Error), #[error("STF error: {0}")] Stf(ita_stf::StfError), - #[error("Storage verified error: {0}")] - StorageVerified(itp_storage_verifier::Error), + #[error("Ocall Api error: {0}")] + OcallApi(itp_ocall_api::Error), #[error(transparent)] Other(#[from] Box), } @@ -58,8 +58,8 @@ impl From for Error { } } -impl From for Error { - fn from(error: itp_storage_verifier::Error) -> Self { - Self::StorageVerified(error) +impl From for Error { + fn from(error: itp_ocall_api::Error) -> Self { + Self::OcallApi(error) } } diff --git a/core-primitives/stf-executor/src/executor.rs b/core-primitives/stf-executor/src/executor.rs index 6e5eda1fd2..93a17fbbdc 100644 --- a/core-primitives/stf-executor/src/executor.rs +++ 
b/core-primitives/stf-executor/src/executor.rs @@ -35,11 +35,10 @@ use ita_stf::{ AccountId, ParentchainHeader, ShardIdentifier, StateTypeDiff, Stf, TrustedCall, TrustedCallSigned, TrustedGetterSigned, }; -use itp_ocall_api::EnclaveAttestationOCallApi; +use itp_ocall_api::{EnclaveAttestationOCallApi, EnclaveOnChainOCallApi}; use itp_registry_storage::{RegistryStorage, RegistryStorageKeys}; use itp_stf_state_handler::{handle_state::HandleState, query_shard_state::QueryShardState}; use itp_storage::StorageEntryVerified; -use itp_storage_verifier::GetStorageVerified; use itp_time_utils::duration_now; use itp_types::{Amount, BlockNumber, OpaqueCall, H256}; @@ -60,8 +59,8 @@ pub struct StfExecutor { impl StfExecutor where - OCallApi: EnclaveAttestationOCallApi + GetStorageVerified, - StateHandler: HandleState, + OCallApi: EnclaveAttestationOCallApi + EnclaveOnChainOCallApi, + StateHandler: HandleState, ExternalitiesT: SgxExternalitiesTrait + Encode, { pub fn new(ocall_api: Arc, state_handler: Arc) -> Self { @@ -108,7 +107,7 @@ where Stf::update_storage(state, &update_map.into()); - debug!("execute STF"); + debug!("execute STF, call with nonce {}", stf_call_signed.nonce); let mut extrinsic_call_backs: Vec = Vec::new(); if let Err(e) = Stf::execute(state, stf_call_signed.clone(), &mut extrinsic_call_backs) { error!("Stf::execute failed: {:?}", e); @@ -130,8 +129,8 @@ where impl StfExecuteTrustedCall for StfExecutor where - OCallApi: EnclaveAttestationOCallApi + GetStorageVerified, - StateHandler: HandleState, + OCallApi: EnclaveAttestationOCallApi + EnclaveOnChainOCallApi, + StateHandler: HandleState, ExternalitiesT: SgxExternalitiesTrait + Encode, { fn execute_trusted_call( @@ -164,7 +163,7 @@ where calls.append(&mut extrinsic_callbacks); trace!("Updating state of shard {:?}", shard); - self.state_handler.write(state, state_lock, shard)?; + self.state_handler.write_after_mutation(state, state_lock, shard)?; Ok(maybe_call_hash) } @@ -173,8 +172,8 @@ where impl StfExecuteShieldFunds for StfExecutor where - OCallApi: EnclaveAttestationOCallApi + GetStorageVerified, - StateHandler: HandleState, + OCallApi: EnclaveAttestationOCallApi + EnclaveOnChainOCallApi, + StateHandler: HandleState, ExternalitiesT: SgxExternalitiesTrait + Encode, { fn execute_shield_funds( @@ -197,7 +196,9 @@ where Stf::execute(&mut state, trusted_call, &mut Vec::::new()) .map_err::(|e| e.into())?; - self.state_handler.write(state, state_lock, shard).map_err(|e| e.into()) + self.state_handler + .write_after_mutation(state, state_lock, shard) + .map_err(|e| e.into()) } fn execute_new_game( @@ -234,7 +235,9 @@ where Stf::execute(&mut state, trusted_call, &mut Vec::::new()) .map_err::(|e| e.into())?; - self.state_handler.write(state, state_lock, shard).map_err(|e| e.into()) + self.state_handler + .write_after_mutation(state, state_lock, shard) + .map_err(|e| e.into()) }, None => { error!("No game entry found for game {}", game); @@ -247,8 +250,8 @@ where impl StfUpdateState for StfExecutor where - OCallApi: EnclaveAttestationOCallApi + GetStorageVerified, - StateHandler: HandleState + QueryShardState, + OCallApi: EnclaveAttestationOCallApi + EnclaveOnChainOCallApi, + StateHandler: HandleState + QueryShardState, ExternalitiesT: SgxExternalitiesTrait + Encode, { fn update_states(&self, header: &ParentchainHeader) -> Result<()> { @@ -272,7 +275,7 @@ where let (state_lock, mut state) = self.state_handler.load_for_mutation(&shard_id)?; match Stf::update_parentchain_block(&mut state, header.clone()) { Ok(_) => { - 
self.state_handler.write(state, state_lock, &shard_id)?; + self.state_handler.write_after_mutation(state, state_lock, &shard_id)?; }, Err(e) => error!("Could not update parentchain block. {:?}: {:?}", shard_id, e), } @@ -302,7 +305,7 @@ where error!("Could not update parentchain block. {:?}: {:?}", shard_id, e) } - self.state_handler.write(state, state_lock, &shard_id)?; + self.state_handler.write_after_mutation(state, state_lock, &shard_id)?; } }, None => debug!("No shards are on the chain yet"), @@ -315,8 +318,8 @@ where impl StateUpdateProposer for StfExecutor where - OCallApi: EnclaveAttestationOCallApi + GetStorageVerified, - StateHandler: HandleState, + OCallApi: EnclaveAttestationOCallApi + EnclaveOnChainOCallApi, + StateHandler: HandleState, ExternalitiesT: SgxExternalitiesTrait + Encode, { type Externalities = ExternalitiesT; @@ -335,7 +338,7 @@ where { let ends_at = duration_now() + max_exec_duration; - let state = self.state_handler.load_initialized(shard)?; + let state = self.state_handler.load(shard)?; let state_hash_before_execution = state_hash(&state); // Execute any pre-processing steps. @@ -376,8 +379,8 @@ where impl StfExecuteTimedGettersBatch for StfExecutor where - OCallApi: EnclaveAttestationOCallApi + GetStorageVerified, - StateHandler: HandleState, + OCallApi: EnclaveAttestationOCallApi + EnclaveOnChainOCallApi, + StateHandler: HandleState, ExternalitiesT: SgxExternalitiesTrait + Encode, { type Externalities = ExternalitiesT; @@ -400,7 +403,7 @@ where } // load state once per shard - let mut state = self.state_handler.load_initialized(&shard)?; + let mut state = self.state_handler.load(&shard)?; for trusted_getter_signed in trusted_getters.into_iter() { // get state @@ -421,7 +424,7 @@ where impl StfExecuteGenericUpdate for StfExecutor where - StateHandler: HandleState, + StateHandler: HandleState, ExternalitiesT: SgxExternalitiesTrait + Encode, { type Externalities = ExternalitiesT; @@ -443,7 +446,7 @@ where let new_state_hash = self .state_handler - .write(new_state, state_lock, shard) + .write_after_mutation(new_state, state_lock, shard) .map_err(|e| Error::StateHandler(e))?; Ok((result, new_state_hash)) } diff --git a/core-primitives/stf-executor/src/executor_tests.rs b/core-primitives/stf-executor/src/executor_tests.rs index b6b671c64c..960fe51ebc 100644 --- a/core-primitives/stf-executor/src/executor_tests.rs +++ b/core-primitives/stf-executor/src/executor_tests.rs @@ -63,7 +63,7 @@ pub fn propose_state_update_executes_all_calls_given_enough_time() { let call_operation_hash_two: H256 = blake2_256(&signed_call_two.clone().into_trusted_operation(true).encode()).into(); - let old_state_hash = state_hash(&state_handler.load_initialized(&shard).unwrap()); + let old_state_hash = state_hash(&state_handler.load(&shard).unwrap()); // when let batch_execution_result = stf_executor @@ -84,10 +84,7 @@ pub fn propose_state_update_executes_all_calls_given_enough_time() { vec![call_operation_hash, call_operation_hash_two] ); // Ensure that state has been updated and not actually written. 
- assert_ne!( - state_handler.load_initialized(&shard).unwrap(), - batch_execution_result.state_after_execution - ); + assert_ne!(state_handler.load(&shard).unwrap(), batch_execution_result.state_after_execution); } pub fn propose_state_update_executes_only_one_trusted_call_given_not_enough_time() { @@ -111,7 +108,7 @@ pub fn propose_state_update_executes_only_one_trusted_call_given_not_enough_time ) .sign(&sender.clone().into(), 0, &mrenclave, &shard); - let old_state_hash = state_hash(&state_handler.load_initialized(&shard).unwrap()); + let old_state_hash = state_hash(&state_handler.load(&shard).unwrap()); // when let batch_execution_result = stf_executor @@ -129,19 +126,17 @@ pub fn propose_state_update_executes_only_one_trusted_call_given_not_enough_time assert_eq!(batch_execution_result.executed_operations.len(), 1); assert_eq!(batch_execution_result.get_executed_operation_hashes(), vec![call_operation_hash]); // Ensure that state has been updated and not actually written. - assert_ne!( - state_handler.load_initialized(&shard).unwrap(), - batch_execution_result.state_after_execution - ); + assert_ne!(state_handler.load(&shard).unwrap(), batch_execution_result.state_after_execution); } pub fn propose_state_update_always_executes_preprocessing_step() { // given let shard = ShardIdentifier::default(); let (stf_executor, _, state_handler) = stf_executor(); + let _init_hash = state_handler.initialize_shard(shard).unwrap(); let key = "my_key".encode(); let value = "my_value".encode(); - let old_state_hash = state_hash(&state_handler.load_initialized(&shard).unwrap()); + let old_state_hash = state_hash(&state_handler.load(&shard).unwrap()); // when let batch_execution_result = stf_executor @@ -161,7 +156,7 @@ pub fn propose_state_update_always_executes_preprocessing_step() { assert_eq!(old_state_hash, batch_execution_result.state_hash_before_execution); // Ensure that state has been updated. - let old_state = state_handler.load_initialized(&shard).unwrap(); + let old_state = state_handler.load(&shard).unwrap(); let retrieved_value = batch_execution_result.state_after_execution.get(key.as_slice()).unwrap(); assert_eq!(*retrieved_value, value); // Ensure that state has not been actually written. @@ -244,9 +239,10 @@ pub fn execute_update_works() { // given let shard = ShardIdentifier::default(); let (stf_executor, _ocall_api, state_handler) = stf_executor(); + let _init_hash = state_handler.initialize_shard(shard).unwrap(); let key = "my_key".encode(); let value = "my_value".encode(); - let old_state_hash = state_hash(&state_handler.load_initialized(&shard).unwrap()); + let old_state_hash = state_hash(&state_handler.load(&shard).unwrap()); // when let (result, updated_state_hash) = stf_executor @@ -261,7 +257,7 @@ pub fn execute_update_works() { assert_ne!(updated_state_hash, old_state_hash); // Ensure that state has been written. 
- let updated_state = state_handler.load_initialized(&shard).unwrap(); + let updated_state = state_handler.load(&shard).unwrap(); let retrieved_value = updated_state.get(key.as_slice()).unwrap(); assert_eq!(*retrieved_value, value); } @@ -307,11 +303,12 @@ fn init_state_and_shard_with_state_handler>( state_handler: &S, ) -> (State, ShardIdentifier) { let shard = ShardIdentifier::default(); + let _hash = state_handler.initialize_shard(shard).unwrap(); let (lock, mut state) = state_handler.load_for_mutation(&shard).unwrap(); test_genesis_setup(&mut state); - state_handler.write(state.clone(), lock, &shard).unwrap(); + state_handler.write_after_mutation(state.clone(), lock, &shard).unwrap(); (state, shard) } diff --git a/core-primitives/stf-state-handler/Cargo.toml b/core-primitives/stf-state-handler/Cargo.toml index aad7499a1e..9f20628aa3 100644 --- a/core-primitives/stf-state-handler/Cargo.toml +++ b/core-primitives/stf-state-handler/Cargo.toml @@ -14,6 +14,7 @@ std = [ "ita-stf/std", "itp-sgx-crypto/std", "itp-sgx-io/std", + "itp-time-utils/std", "itp-types/std", "sgx-externalities/std", "thiserror", @@ -25,6 +26,7 @@ sgx = [ "ita-stf/sgx", "itp-sgx-crypto/sgx", "itp-sgx-io/sgx", + "itp-time-utils/sgx", "sgx-externalities/sgx", "thiserror_sgx", ] @@ -40,6 +42,7 @@ ita-stf = { path = "../../app-libs/stf", default-features = false } itp-settings = { path = "../../core-primitives/settings" } itp-sgx-crypto = { path = "../../core-primitives/sgx/crypto", default-features = false } itp-sgx-io = { path = "../../core-primitives/sgx/io", default-features = false } +itp-time-utils = { path = "../../core-primitives/time-utils", default-features = false } itp-types = { path = "../../core-primitives/types", default-features = false } # sgx enabled external libraries @@ -54,7 +57,10 @@ thiserror = { version = "1.0", optional = true } sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", optional = true } # no-std dependencies -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } lazy_static = { version = "1.1.0", features = ["spin_no_std"] } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } + +[dev-dependencies] +itp-sgx-crypto = { path = "../../core-primitives/sgx/crypto", features = ["mocks"] } \ No newline at end of file diff --git a/core-primitives/stf-state-handler/src/error.rs b/core-primitives/stf-state-handler/src/error.rs index bba2db2d1b..c990238552 100644 --- a/core-primitives/stf-state-handler/src/error.rs +++ b/core-primitives/stf-state-handler/src/error.rs @@ -24,13 +24,25 @@ use rust_base58::base58::FromBase58Error; #[cfg(feature = "sgx")] use base58::FromBase58Error; +use crate::state_snapshot_primitives::StateId; +use itp_types::ShardIdentifier; use sgx_types::sgx_status_t; -use std::{boxed::Box, format}; +use std::{boxed::Box, format, string::String}; pub type Result = core::result::Result; #[derive(Debug, thiserror::Error)] pub enum Error { + #[error("Empty state repository")] + EmptyRepository, + #[error("State ID is invalid 
and does not exist: {0}")] + InvalidStateId(StateId), + #[error("Shard is invalid and does not exist: {0}")] + InvalidShard(ShardIdentifier), + #[error("State with hash {0} could not be found in the state repository")] + StateNotFoundInRepository(String), + #[error("Cache size for registry is zero")] + ZeroCacheSize, #[error("Could not acquire lock, lock is poisoned")] LockPoisoning, #[error("OsString conversion error")] diff --git a/core-primitives/stf-state-handler/src/file_io.rs b/core-primitives/stf-state-handler/src/file_io.rs index 58f3923bb8..bfeac8e290 100644 --- a/core-primitives/stf-state-handler/src/file_io.rs +++ b/core-primitives/stf-state-handler/src/file_io.rs @@ -18,134 +18,373 @@ #[cfg(all(not(feature = "std"), feature = "sgx"))] use crate::sgx_reexport_prelude::*; -use crate::error::{Error, Result}; -use base58::{FromBase58, ToBase58}; -use codec::{Decode, Encode}; -use ita_stf::{State as StfState, StateType as StfStateType, Stf}; -use itp_settings::files::{ENCRYPTED_STATE_FILE, SHARDS_PATH}; -use itp_sgx_crypto::{AesSeal, StateCrypto}; -use itp_sgx_io::{read as io_read, write as io_write, SealedIO}; -use itp_types::{ShardIdentifier, H256}; -use log::*; -use sgx_tcrypto::rsgx_sha256_slice; -use sgx_types::sgx_status_t; -use std::{format, fs, io::Write, path::Path, vec::Vec}; - -pub(crate) fn load_initialized_state(shard: &ShardIdentifier) -> Result { - trace!("Loading state from shard {:?}", shard); - let state = if exists(&shard) { - load(&shard)? - } else { - trace!("Initialize new shard: {:?}", shard); - init_shard(&shard)?; - Stf::init_state() - }; - trace!("Successfully loaded or initialized state from shard {:?}", shard); - Ok(state) -} +#[cfg(any(test, feature = "std"))] +use rust_base58::base58::ToBase58; -pub(crate) fn load(shard: &ShardIdentifier) -> Result { - // load last state - let state_path = - format!("{}/{}/{}", SHARDS_PATH, shard.encode().to_base58(), ENCRYPTED_STATE_FILE); - trace!("loading state from: {}", state_path); - let state_vec = read(&state_path)?; - - // state is now decrypted! - let state: StfStateType = match state_vec.len() { - 0 => { - debug!("state at {} is empty. will initialize it.", state_path); - Stf::init_state().state - }, - n => { - debug!("State loaded from {} with size {}B, deserializing...", state_path, n); - StfStateType::decode(&mut state_vec.as_slice())? - }, - }; - trace!("state decoded successfully"); - // add empty state-diff - let state_with_diff = StfState { state, state_diff: Default::default() }; - trace!("New state created: {:?}", state_with_diff); - Ok(state_with_diff) -} +#[cfg(feature = "sgx")] +use base58::ToBase58; -/// Writes the state (without the state diff) encrypted into the enclave storage -/// Returns the hash of the saved state (independent of the diff!) 
-pub(crate) fn write(state: StfState, shard: &ShardIdentifier) -> Result { - let state_path = - format!("{}/{}/{}", SHARDS_PATH, shard.encode().to_base58(), ENCRYPTED_STATE_FILE); - trace!("writing state to: {}", state_path); +#[cfg(any(test, feature = "sgx"))] +use itp_settings::files::ENCRYPTED_STATE_FILE; - // only save the state, the state diff is pruned - let cyphertext = encrypt(state.state.encode())?; +#[cfg(any(test, feature = "sgx"))] +use std::string::String; - let state_hash = rsgx_sha256_slice(&cyphertext)?; +use crate::{error::Result, state_snapshot_primitives::StateId}; +use codec::Encode; +use itp_settings::files::SHARDS_PATH; +use itp_types::ShardIdentifier; +use log::error; +use std::{format, path::PathBuf, vec::Vec}; - debug!("new encrypted state with hash={:?} written to {}", state_hash, state_path); +/// Trait to abstract file I/O for state. +pub trait StateFileIo { + type StateType; + type HashType; - io_write(&cyphertext, &state_path)?; - Ok(state_hash.into()) -} + /// Load a state (returns error if it does not exist). + fn load( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result; -pub(crate) fn exists(shard: &ShardIdentifier) -> bool { - Path::new(&format!("{}/{}/{}", SHARDS_PATH, shard.encode().to_base58(), ENCRYPTED_STATE_FILE)) - .exists() -} + /// Compute the state hash of a specific state (returns error if it does not exist). + fn compute_hash( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result; + + /// Create an empty (default initialized) state. + fn create_initialized( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result; + + /// Write the state. + fn write( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + state: Self::StateType, + ) -> Result; + + /// Remove a state. + fn remove(&self, shard_identifier: &ShardIdentifier, state_id: StateId) -> Result<()>; + + /// Checks if a given shard directory exists and contains at least one state instance. + fn shard_exists(&self, shard_identifier: &ShardIdentifier) -> bool; -pub(crate) fn init_shard(shard: &ShardIdentifier) -> Result<()> { - let path = format!("{}/{}", SHARDS_PATH, shard.encode().to_base58()); - fs::create_dir_all(path.clone())?; - let mut file = fs::File::create(format!("{}/{}", path, ENCRYPTED_STATE_FILE))?; - Ok(file.write_all(b"")?) + /// Lists all shards. + fn list_shards(&self) -> Result>; + + /// List all states for a shard. + fn list_state_ids_for_shard(&self, shard_identifier: &ShardIdentifier) -> Result>; } -pub(crate) fn read(path: &str) -> Result> { - let mut bytes = io_read(path)?; +#[cfg(feature = "sgx")] +pub mod sgx { + + use super::*; + use crate::{error::Error, state_key_repository::AccessStateKey}; + use base58::FromBase58; + use codec::Decode; + use ita_stf::{State as StfState, StateType as StfStateType, Stf}; + use itp_sgx_crypto::StateCrypto; + use itp_sgx_io::{read as io_read, write as io_write}; + use itp_types::H256; + use log::*; + use sgx_tcrypto::rsgx_sha256_slice; + use std::{fs, path::Path, sync::Arc}; - if bytes.is_empty() { - return Ok(bytes) + /// SGX state file I/O. 
+ pub struct SgxStateFileIo { + state_key_repository: Arc, } - let state_hash = rsgx_sha256_slice(&bytes)?; - debug!( - "read encrypted state with hash {:?} from {}", - H256::from_slice(state_hash.as_ref()), - path - ); + impl SgxStateFileIo + where + StateKeyRepository: AccessStateKey, + { + pub fn new(state_key_repository: Arc) -> Self { + SgxStateFileIo { state_key_repository } + } + + fn read(&self, path: &Path) -> Result> { + let mut bytes = io_read(path)?; + + if bytes.is_empty() { + return Ok(bytes) + } + + let state_hash = rsgx_sha256_slice(&bytes)?; + debug!( + "read encrypted state with hash {:?} from {:?}", + H256::from_slice(state_hash.as_ref()), + path + ); + + let state_key = self.state_key_repository.retrieve_key()?; + + state_key + .decrypt(&mut bytes) + .map_err(|e| Error::Other(format!("{:?}", e).into()))?; + trace!("buffer decrypted = {:?}", bytes); + + Ok(bytes) + } + + fn encrypt(&self, mut state: Vec) -> Result> { + let state_key = self.state_key_repository.retrieve_key()?; + + state_key + .encrypt(&mut state) + .map_err(|e| Error::Other(format!("{:?}", e).into()))?; + Ok(state) + } + } + + impl StateFileIo for SgxStateFileIo + where + StateKey: AccessStateKey, + { + type StateType = StfState; + type HashType = H256; + + fn load( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result { + if !file_for_state_exists(shard_identifier, state_id) { + return Err(Error::InvalidStateId(state_id)) + } + + let state_path = state_file_path(shard_identifier, state_id); + trace!("loading state from: {:?}", state_path); + let state_encoded = self.read(&state_path)?; + + // State is now decrypted. + debug!( + "State loaded from {:?} with size {}B, deserializing...", + state_path, + state_encoded.len() + ); + let state = StfStateType::decode(&mut state_encoded.as_slice())?; + + trace!("state decoded successfully"); + // Add empty state-diff. + let state_with_diff = StfState { state, state_diff: Default::default() }; + trace!("New state created: {:?}", state_with_diff); + Ok(state_with_diff) + } + + fn compute_hash( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result { + if !file_for_state_exists(shard_identifier, state_id) { + return Err(Error::InvalidStateId(state_id)) + } + + let state_file_path = state_file_path(shard_identifier, state_id); + let bytes = io_read(state_file_path)?; + let state_hash = rsgx_sha256_slice(&bytes)?; + Ok(H256::from_slice(state_hash.as_ref())) + } + + fn create_initialized( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result { + init_shard(&shard_identifier)?; + let state = Stf::init_state(); + self.write(shard_identifier, state_id, state) + } + + /// Writes the state (without the state diff) encrypted into the enclave storage. + /// Returns the hash of the saved state (independent of the diff!). + fn write( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + state: Self::StateType, + ) -> Result { + let state_path = state_file_path(shard_identifier, state_id); + trace!("writing state to: {:?}", state_path); + + // Only save the state, the state diff is pruned. 
+ let cyphertext = self.encrypt(state.state.encode())?; + + let state_hash = rsgx_sha256_slice(&cyphertext)?; + + debug!("new encrypted state with hash={:?} written to {:?}", state_hash, state_path); - AesSeal::unseal().map(|key| key.decrypt(&mut bytes))??; - trace!("buffer decrypted = {:?}", bytes); + io_write(&cyphertext, &state_path)?; + Ok(state_hash.into()) + } - Ok(bytes) + fn remove(&self, shard_identifier: &ShardIdentifier, state_id: StateId) -> Result<()> { + fs::remove_file(state_file_path(shard_identifier, state_id)) + .map_err(|e| Error::Other(e.into())) + } + + fn shard_exists(&self, shard_identifier: &ShardIdentifier) -> bool { + shard_exists(shard_identifier) + } + + fn list_shards(&self) -> Result> { + list_shards() + } + + fn list_state_ids_for_shard( + &self, + shard_identifier: &ShardIdentifier, + ) -> Result> { + let shard_path = shard_path(shard_identifier); + let directory_items = list_items_in_directory(&shard_path); + + Ok(directory_items + .iter() + .flat_map(|item| { + let maybe_state_id = extract_state_id_from_file_name(item.as_str()); + if maybe_state_id.is_none() { + warn!("Found item ({}) that does not match state snapshot naming pattern, ignoring it", item) + } + maybe_state_id + }) + .collect()) + } + } + + fn state_file_path(shard: &ShardIdentifier, state_id: StateId) -> PathBuf { + let mut shard_file_path = shard_path(shard); + shard_file_path.push(to_file_name(state_id)); + shard_file_path + } + + fn file_for_state_exists(shard: &ShardIdentifier, state_id: StateId) -> bool { + state_file_path(shard, state_id).exists() + } + + /// Returns true if a shard directory for a given identifier exists AND contains at least one state file. + pub(crate) fn shard_exists(shard: &ShardIdentifier) -> bool { + let shard_path = shard_path(shard); + if !shard_path.exists() { + return false + } + + shard_path + .read_dir() + // When the iterator over all files in the directory returns none, the directory is empty. + .map(|mut d| d.next().is_some()) + .unwrap_or(false) + } + + pub(crate) fn init_shard(shard: &ShardIdentifier) -> Result<()> { + let path = shard_path(shard); + fs::create_dir_all(path).map_err(|e| Error::Other(e.into())) + } + + /// List any valid shards that are found in the shard path. + /// Ignore any items (files, directories) that are not valid shard identifiers. + pub(crate) fn list_shards() -> Result> { + let directory_items = list_items_in_directory(&PathBuf::from(SHARDS_PATH)); + Ok(directory_items + .iter() + .flat_map(|item| { + item.from_base58() + .ok() + .map(|encoded_shard_id| { + ShardIdentifier::decode(&mut encoded_shard_id.as_slice()).ok() + }) + .flatten() + }) + .collect()) + } + + fn list_items_in_directory(directory: &Path) -> Vec { + let items = match directory.read_dir() { + Ok(rd) => rd, + Err(_) => return Vec::new(), + }; + + items + .flat_map(|fr| fr.map(|de| de.file_name().into_string().ok()).ok().flatten()) + .collect() + } } -#[allow(unused)] -fn write_encrypted(bytes: &mut Vec, path: &str) -> Result { - debug!("plaintext data to be written: {:?}", bytes); - AesSeal::unseal().map(|key| key.encrypt(bytes))?; - io_write(&bytes, path)?; - Ok(sgx_status_t::SGX_SUCCESS) +/// Remove a shard directory with all of its content. 
+pub fn purge_shard_dir(shard: &ShardIdentifier) { + let shard_dir_path = shard_path(shard); + if let Err(e) = std::fs::remove_dir_all(&shard_dir_path) { + error!("Failed to remove shard directory {:?}: {:?}", shard_dir_path, e); + } +} + +pub(crate) fn shard_path(shard: &ShardIdentifier) -> PathBuf { + PathBuf::from(format!("{}/{}", SHARDS_PATH, shard.encode().to_base58())) } -pub(crate) fn encrypt(mut state: Vec) -> Result> { - AesSeal::unseal().map(|key| key.encrypt(&mut state))??; - Ok(state) +#[cfg(any(test, feature = "sgx"))] +fn to_file_name(state_id: StateId) -> String { + format!("{}_{}", state_id, ENCRYPTED_STATE_FILE) } -pub(crate) fn list_shards() -> Result> { - let files = match fs::read_dir(SHARDS_PATH) { - Ok(f) => f, - Err(_) => return Ok(Vec::new()), - }; - let mut shards = Vec::new(); - for file_result in files { - let s = file_result? - .file_name() - .into_string() - .map_err(|_| Error::OsStringConversion)? - .from_base58()?; - - shards.push(ShardIdentifier::decode(&mut s.as_slice())?); +#[cfg(any(test, feature = "sgx"))] +fn extract_state_id_from_file_name(file_name: &str) -> Option { + let state_id_str = file_name.strip_suffix(format!("_{}", ENCRYPTED_STATE_FILE).as_str())?; + state_id_str.parse::().ok() +} + +#[cfg(test)] +mod tests { + + use super::*; + use crate::state_snapshot_primitives::generate_current_timestamp_state_id; + + #[test] + fn state_id_to_file_name_works() { + assert!(to_file_name(generate_current_timestamp_state_id()).ends_with(ENCRYPTED_STATE_FILE)); + assert!(to_file_name(generate_current_timestamp_state_id()) + .strip_suffix(format!("_{}", ENCRYPTED_STATE_FILE).as_str()) + .is_some()); + + let now_time_stamp = generate_current_timestamp_state_id(); + assert_eq!( + extract_state_id_from_file_name(to_file_name(now_time_stamp).as_str()).unwrap(), + now_time_stamp + ); + } + + #[test] + fn extract_timestamp_from_file_name_works() { + assert_eq!( + 123456u128, + extract_state_id_from_file_name(format!("123456_{}", ENCRYPTED_STATE_FILE).as_str()) + .unwrap() + ); + assert_eq!( + 0u128, + extract_state_id_from_file_name(format!("0_{}", ENCRYPTED_STATE_FILE).as_str()) + .unwrap() + ); + + assert!(extract_state_id_from_file_name( + format!("987345{}", ENCRYPTED_STATE_FILE).as_str() + ) + .is_none()); + assert!( + extract_state_id_from_file_name(format!("{}", ENCRYPTED_STATE_FILE).as_str()).is_none() + ); + assert!(extract_state_id_from_file_name( + format!("1234_{}-other", ENCRYPTED_STATE_FILE).as_str() + ) + .is_none()); } - Ok(shards) } diff --git a/core-primitives/stf-state-handler/src/global_file_state_handler.rs b/core-primitives/stf-state-handler/src/global_file_state_handler.rs deleted file mode 100644 index 03933e6037..0000000000 --- a/core-primitives/stf-state-handler/src/global_file_state_handler.rs +++ /dev/null @@ -1,83 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- -*/ - -#[cfg(feature = "sgx")] -use std::sync::{SgxRwLock as RwLock, SgxRwLockWriteGuard as RwLockWriteGuard}; - -#[cfg(feature = "std")] -use std::sync::{RwLock, RwLockWriteGuard}; - -use crate::{ - error::{Error, Result}, - file_io::{exists, list_shards, load_initialized_state, write as state_write}, - handle_state::HandleState, - query_shard_state::QueryShardState, -}; -use ita_stf::State as StfState; -use itp_types::{ShardIdentifier, H256}; -use lazy_static::lazy_static; -use std::vec::Vec; - -lazy_static! { - // as long as we have a file backend, we use this 'dummy' lock, - // which guards against concurrent read/write access - pub static ref STF_STATE_LOCK: RwLock<()> = Default::default(); -} - -/// Implementation of the `HandleState` trait using global files and locks. -/// -/// For each call it will make a file access and encrypt/decrypt the state from file I/O. -/// The lock it uses is therefore an 'empty' dummy lock, that guards against concurrent file access. -pub struct GlobalFileStateHandler; - -impl HandleState for GlobalFileStateHandler { - type WriteLockPayload = (); - type StateT = StfState; - - fn load_initialized(&self, shard: &ShardIdentifier) -> Result { - let _state_read_lock = STF_STATE_LOCK.read().map_err(|_| Error::LockPoisoning)?; - load_initialized_state(shard) - } - - fn load_for_mutation( - &self, - shard: &ShardIdentifier, - ) -> Result<(RwLockWriteGuard<'_, Self::WriteLockPayload>, Self::StateT)> { - let state_write_lock = STF_STATE_LOCK.write().map_err(|_| Error::LockPoisoning)?; - let loaded_state = load_initialized_state(shard)?; - Ok((state_write_lock, loaded_state)) - } - - fn write( - &self, - state: Self::StateT, - _state_lock: RwLockWriteGuard<'_, Self::WriteLockPayload>, - shard: &ShardIdentifier, - ) -> Result { - state_write(state, shard) - } -} - -impl QueryShardState for GlobalFileStateHandler { - fn exists(&self, shard: &ShardIdentifier) -> bool { - exists(shard) - } - - fn list_shards(&self) -> Result> { - list_shards() - } -} diff --git a/core-primitives/stf-state-handler/src/handle_state.rs b/core-primitives/stf-state-handler/src/handle_state.rs index 419849758f..c6b8702610 100644 --- a/core-primitives/stf-state-handler/src/handle_state.rs +++ b/core-primitives/stf-state-handler/src/handle_state.rs @@ -22,20 +22,25 @@ use std::sync::SgxRwLockWriteGuard as RwLockWriteGuard; use std::sync::RwLockWriteGuard; use crate::error::Result; -use itp_types::{ShardIdentifier, H256}; +use itp_types::ShardIdentifier; -/// Facade for handling STF state loading and storing (e.g. from file) +/// Facade for handling STF state loading and storing (e.g. from file). pub trait HandleState { type WriteLockPayload; type StateT; + type HashType; - /// Load the state for a given shard + /// Initialize a new shard. /// - /// Initializes the shard and state if necessary, so this is guaranteed to - /// return a state - fn load_initialized(&self, shard: &ShardIdentifier) -> Result; + /// Initializes a default state for the shard and returns its hash. + fn initialize_shard(&self, shard: ShardIdentifier) -> Result; - /// Load the state in order to mutate it + /// Load the state for a given shard. + /// + /// Requires the shard to exist and be initialized, otherwise returns an error. + fn load(&self, shard: &ShardIdentifier) -> Result; + + /// Load the state in order to mutate it. /// /// Returns a write lock to protect against any concurrent access as long as /// the lock is held. 
Finalize the operation by calling `write` and returning @@ -45,13 +50,18 @@ pub trait HandleState { shard: &ShardIdentifier, ) -> Result<(RwLockWriteGuard<'_, Self::WriteLockPayload>, Self::StateT)>; - /// Writes the state (without the state diff) encrypted into the enclave + /// Writes the state (without the state diff) encrypted into the enclave. /// - /// Returns the hash of the saved state (independent of the diff!) - fn write( + /// Returns the hash of the saved state (independent of the diff!). + fn write_after_mutation( &self, state: Self::StateT, state_lock: RwLockWriteGuard<'_, Self::WriteLockPayload>, shard: &ShardIdentifier, - ) -> Result; + ) -> Result; + + /// Reset (or override) a state. + /// + /// Use in cases where the previous state is of no interest. Otherwise use `load_for_mutation` and `write_after_mutation`. + fn reset(&self, state: Self::StateT, shard: &ShardIdentifier) -> Result; } diff --git a/core-primitives/stf-state-handler/src/in_memory_state_file_io.rs b/core-primitives/stf-state-handler/src/in_memory_state_file_io.rs new file mode 100644 index 0000000000..f9dfb921ed --- /dev/null +++ b/core-primitives/stf-state-handler/src/in_memory_state_file_io.rs @@ -0,0 +1,380 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(feature = "sgx")] +use std::sync::SgxRwLock as RwLock; + +#[cfg(feature = "std")] +use std::sync::RwLock; + +use crate::{ + error::{Error, Result}, + file_io::StateFileIo, + state_snapshot_primitives::StateId, +}; +use codec::Encode; +use itp_types::ShardIdentifier; +use std::{collections::HashMap, hash::Hasher as HasherTrait, vec::Vec}; + +type StateHash = u64; +type ShardDirectory = HashMap; +type ShardsRootDirectory = HashMap>; + +/// State file I/O using (unencrypted) in-memory representation of the state files. +/// Uses u64 hash type. Can be used as mock for testing. 
+#[derive(Default)] +pub struct InMemoryStateFileIo +where + State: Clone + Default + Encode, + Hasher: HasherTrait + Clone + Default, +{ + emulated_shard_directory: RwLock>, + hasher: Hasher, +} + +impl InMemoryStateFileIo +where + State: Clone + Default + Encode, + Hasher: HasherTrait + Clone + Default, +{ + #[allow(unused)] + pub fn new(hash_function: Hasher, shards: &[ShardIdentifier]) -> Self { + let shard_hash_map: HashMap<_, _> = + shards.iter().map(|s| (*s, ShardDirectory::::default())).collect(); + + InMemoryStateFileIo { + emulated_shard_directory: RwLock::new(shard_hash_map), + hasher: hash_function, + } + } + + #[cfg(test)] + pub fn get_states_for_shard( + &self, + shard_identifier: &ShardIdentifier, + ) -> Result> { + let files_lock = self.emulated_shard_directory.read().map_err(|_| Error::LockPoisoning)?; + files_lock + .get(shard_identifier) + .cloned() + .ok_or_else(|| Error::InvalidShard(*shard_identifier)) + } + + fn compute_state_hash(&self, state: &State) -> StateHash { + let encoded_state = state.encode(); + let mut hasher = self.hasher.clone(); + hasher.write(encoded_state.as_slice()); + hasher.finish() + } + + fn default_states_map(&self, state_id: StateId) -> ShardDirectory { + self.initialize_states_map(state_id, State::default()) + } + + fn initialize_states_map(&self, state_id: StateId, state: State) -> ShardDirectory { + HashMap::from([(state_id, self.generate_state_entry(state))]) + } + + fn generate_default_state_entry(&self) -> (StateHash, State) { + self.generate_state_entry(State::default()) + } + + fn generate_state_entry(&self, state: State) -> (StateHash, State) { + let state_hash = self.compute_state_hash(&state); + (state_hash, state) + } +} + +impl StateFileIo for InMemoryStateFileIo +where + State: Clone + Default + Encode, + Hasher: HasherTrait + Clone + Default, +{ + type StateType = State; + type HashType = StateHash; + + fn load( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result { + let directory_lock = + self.emulated_shard_directory.read().map_err(|_| Error::LockPoisoning)?; + let states_for_shard = directory_lock + .get(shard_identifier) + .ok_or_else(|| Error::InvalidShard(*shard_identifier))?; + states_for_shard + .get(&state_id) + .map(|(_, s)| -> State { s.clone() }) + .ok_or(Error::InvalidStateId(state_id)) + } + + fn compute_hash( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result { + let state = self.load(shard_identifier, state_id)?; + Ok(self.compute_state_hash(&state)) + } + + fn create_initialized( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + ) -> Result { + let mut directory_lock = + self.emulated_shard_directory.write().map_err(|_| Error::LockPoisoning)?; + let states_for_shard = directory_lock + .entry(*shard_identifier) + .or_insert_with(|| self.default_states_map(state_id)); + let state_entry = states_for_shard + .entry(state_id) + .or_insert_with(|| self.generate_state_entry(State::default())); + Ok(state_entry.0) + } + + fn write( + &self, + shard_identifier: &ShardIdentifier, + state_id: StateId, + state: Self::StateType, + ) -> Result { + let mut directory_lock = + self.emulated_shard_directory.write().map_err(|_| Error::LockPoisoning)?; + + let states_for_shard = directory_lock + .entry(*shard_identifier) + .or_insert_with(|| self.default_states_map(state_id)); + + let state_hash = self.compute_state_hash(&state); + *states_for_shard + .entry(state_id) + .or_insert_with(|| self.generate_default_state_entry()) = (state_hash, state); + + 
Ok(state_hash) + } + + fn remove(&self, shard_identifier: &ShardIdentifier, state_id: StateId) -> Result<()> { + let mut directory_lock = + self.emulated_shard_directory.write().map_err(|_| Error::LockPoisoning)?; + + let states_for_shard = directory_lock + .get_mut(shard_identifier) + .ok_or_else(|| Error::InvalidShard(*shard_identifier))?; + + states_for_shard + .remove(&state_id) + .ok_or(Error::InvalidStateId(state_id)) + .map(|_| {}) + } + + fn shard_exists(&self, shard_identifier: &ShardIdentifier) -> bool { + let directory_lock = self.emulated_shard_directory.read().unwrap(); + directory_lock.contains_key(shard_identifier) + } + + fn list_shards(&self) -> Result> { + let directory_lock = + self.emulated_shard_directory.read().map_err(|_| Error::LockPoisoning)?; + Ok(directory_lock.keys().copied().collect()) + } + + fn list_state_ids_for_shard(&self, shard_identifier: &ShardIdentifier) -> Result> { + let directory_lock = + self.emulated_shard_directory.read().map_err(|_| Error::LockPoisoning)?; + let shard_directory = directory_lock + .get(shard_identifier) + .ok_or_else(|| Error::InvalidShard(*shard_identifier))?; + Ok(shard_directory.keys().cloned().collect()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::{assert_matches::assert_matches, collections::hash_map::DefaultHasher}; + + type TestState = u64; + type TestStateFileIo = InMemoryStateFileIo; + + #[test] + fn shard_directory_is_empty_after_initialization() { + let state_file_io = create_empty_in_memory_state_file_io(); + assert!(state_file_io.list_shards().unwrap().is_empty()); + } + + #[test] + fn load_on_empty_directory_and_shard_returns_error() { + let state_file_io = create_empty_in_memory_state_file_io(); + + assert_matches!( + state_file_io.load(&ShardIdentifier::random(), 1234), + Err(Error::InvalidShard(_)) + ); + } + + #[test] + fn initialize_with_shard_creates_empty_directory() { + let shard = ShardIdentifier::from([2u8; 32]); + let state_file_io = create_in_memory_state_file_io(&[shard]); + + assert!(state_file_io.list_state_ids_for_shard(&shard).unwrap().is_empty()); + assert!(state_file_io + .list_state_ids_for_shard(&ShardIdentifier::from([3u8; 32])) + .is_err()); + } + + #[test] + fn load_when_state_does_not_exist_returns_error() { + let state_file_io = create_empty_in_memory_state_file_io(); + let shard_id = ShardIdentifier::random(); + let _ = state_file_io.create_initialized(&shard_id, 1234).unwrap(); + + assert_matches!(state_file_io.load(&shard_id, 12345), Err(Error::InvalidStateId(12345))); + } + + #[test] + fn create_initialized_when_shard_already_exists_works() { + let shard = ShardIdentifier::random(); + let state_file_io = create_in_memory_state_file_io(&[shard]); + + assert!(state_file_io.create_initialized(&shard, 1245).is_ok()); + } + + #[test] + fn create_initialized_adds_default_state() { + let state_file_io = create_empty_in_memory_state_file_io(); + let shard_id = ShardIdentifier::random(); + let state_id = 31081984u128; + let state_hash = state_file_io.create_initialized(&shard_id, state_id).unwrap(); + + assert_eq!(1, state_file_io.list_shards().unwrap().len()); + assert_eq!(TestState::default(), state_file_io.load(&shard_id, state_id).unwrap()); + assert_eq!(1, state_file_io.list_state_ids_for_shard(&shard_id).unwrap().len()); + + assert_entry(&state_file_io, &shard_id, state_id, &StateHash::default(), &state_hash); + } + + #[test] + fn write_works_when_no_previous_shard_or_file_exists() { + let state_file_io = create_empty_in_memory_state_file_io(); + let shard_id = 
ShardIdentifier::random(); + let state_id = 23u128; + let test_state = 42u64; + + let state_hash = state_file_io.write(&shard_id, state_id, test_state).unwrap(); + + assert_eq!(1, state_file_io.list_shards().unwrap().len()); + assert_eq!(test_state, state_file_io.load(&shard_id, state_id).unwrap()); + assert_eq!(1, state_file_io.list_state_ids_for_shard(&shard_id).unwrap().len()); + assert_entry(&state_file_io, &shard_id, state_id, &test_state, &state_hash); + } + + #[test] + fn write_overwrites_existing_state() { + let state_file_io = create_empty_in_memory_state_file_io(); + let shard_id = ShardIdentifier::random(); + let state_id = 123456u128; + let _ = state_file_io.create_initialized(&shard_id, state_id).unwrap(); + + let test_state = 4256u64; + let state_hash = state_file_io.write(&shard_id, state_id, test_state).unwrap(); + + assert_eq!(1, state_file_io.list_shards().unwrap().len()); + assert_eq!(test_state, state_file_io.load(&shard_id, state_id).unwrap()); + assert_eq!(1, state_file_io.list_state_ids_for_shard(&shard_id).unwrap().len()); + assert_entry(&state_file_io, &shard_id, state_id, &test_state, &state_hash); + } + + #[test] + fn remove_files_works() { + let state_file_io = create_empty_in_memory_state_file_io(); + let shard_id = ShardIdentifier::random(); + let initial_state_id = 42u128; + let _ = state_file_io.create_initialized(&shard_id, initial_state_id).unwrap(); + + let state_ids = vec![1u128, 2u128, 3u128]; + + for state_id in state_ids.iter() { + let _ = state_file_io.write(&shard_id, *state_id, 987345).unwrap(); + } + + let mut expected_size = state_ids.len() + 1; + assert_eq!(expected_size, state_file_io.list_state_ids_for_shard(&shard_id).unwrap().len()); + expected_size -= 1; + + for state_id in state_ids.iter() { + state_file_io.remove(&shard_id, *state_id).unwrap(); + assert_matches!( + state_file_io.load(&shard_id, *state_id), + Err(Error::InvalidStateId(_)) + ); + assert_eq!( + expected_size, + state_file_io.list_state_ids_for_shard(&shard_id).unwrap().len() + ); + expected_size -= 1; + } + } + + #[test] + fn initialize_with_shards_creates_empty_maps() { + let shards = vec![ShardIdentifier::random(), ShardIdentifier::random()]; + let state_file_io = create_in_memory_state_file_io(shards.as_slice()); + + assert_eq!(shards.len(), state_file_io.list_shards().unwrap().len()); + for shard in shards { + assert!(state_file_io.list_state_ids_for_shard(&shard).unwrap().is_empty()); + } + } + + fn assert_entry( + state_file_io: &TestStateFileIo, + shard_id: &ShardIdentifier, + state_id: StateId, + state: &TestState, + state_hash: &StateHash, + ) { + let (retrieved_hash, retrieved_state) = + get_state_entry(&state_file_io, &shard_id, state_id); + assert!(state_file_io.shard_exists(shard_id)); + assert_eq!(state_hash, &retrieved_hash); + assert_eq!(state, &retrieved_state); + } + + fn get_state_entry( + state_file_io: &TestStateFileIo, + shard_id: &ShardIdentifier, + state_id: StateId, + ) -> (StateHash, TestState) { + state_file_io + .get_states_for_shard(shard_id) + .unwrap() + .get(&state_id) + .unwrap() + .clone() + } + + fn create_in_memory_state_file_io(shards: &[ShardIdentifier]) -> TestStateFileIo { + InMemoryStateFileIo::new(DefaultHasher::default(), shards) + } + + fn create_empty_in_memory_state_file_io() -> TestStateFileIo { + create_in_memory_state_file_io(&[]) + } +} diff --git a/core-primitives/stf-state-handler/src/lib.rs b/core-primitives/stf-state-handler/src/lib.rs index 8b44cf689b..492522ef24 100644 --- a/core-primitives/stf-state-handler/src/lib.rs 
+++ b/core-primitives/stf-state-handler/src/lib.rs @@ -16,6 +16,7 @@ */ #![cfg_attr(not(feature = "std"), no_std)] +#![feature(assert_matches)] #[cfg(all(feature = "std", feature = "sgx"))] compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the same time"); @@ -31,17 +32,15 @@ pub mod sgx_reexport_prelude { } pub mod error; +pub mod file_io; pub mod handle_state; +mod in_memory_state_file_io; pub mod query_shard_state; - -#[cfg(feature = "sgx")] -pub mod global_file_state_handler; - -#[cfg(feature = "sgx")] -pub use global_file_state_handler::GlobalFileStateHandler; - -#[cfg(feature = "sgx")] -mod file_io; - -#[cfg(all(feature = "test", feature = "sgx"))] -pub mod tests; +pub mod state_handler; +pub mod state_key_repository; +mod state_snapshot_primitives; +pub mod state_snapshot_repository; +pub mod state_snapshot_repository_loader; +pub mod test; + +pub use state_handler::StateHandler; diff --git a/core-primitives/stf-state-handler/src/query_shard_state.rs b/core-primitives/stf-state-handler/src/query_shard_state.rs index 6d5d449a2c..11ff46d044 100644 --- a/core-primitives/stf-state-handler/src/query_shard_state.rs +++ b/core-primitives/stf-state-handler/src/query_shard_state.rs @@ -25,7 +25,7 @@ use std::vec::Vec; /// SGX exclusive data structures (feature sgx) pub trait QueryShardState { /// Query whether a given shard exists - fn exists(&self, shard: &ShardIdentifier) -> bool; + fn shard_exists(&self, shard: &ShardIdentifier) -> Result; /// List all available shards fn list_shards(&self) -> Result>; diff --git a/core-primitives/stf-state-handler/src/state_handler.rs b/core-primitives/stf-state-handler/src/state_handler.rs new file mode 100644 index 0000000000..b4b8010237 --- /dev/null +++ b/core-primitives/stf-state-handler/src/state_handler.rs @@ -0,0 +1,180 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(feature = "sgx")] +use std::sync::{SgxRwLock as RwLock, SgxRwLockWriteGuard as RwLockWriteGuard}; + +#[cfg(feature = "std")] +use std::sync::{RwLock, RwLockWriteGuard}; + +use crate::{ + error::{Error, Result}, + handle_state::HandleState, + query_shard_state::QueryShardState, + state_snapshot_repository::VersionedStateAccess, +}; +use itp_types::ShardIdentifier; +use std::vec::Vec; + +/// Implementation of the `HandleState` trait. +/// +/// It's a concurrency wrapper around a state snapshot repository, which handles +/// access to any shards and state files. The state handler ensures we have thread-safe +/// concurrent access to that repository. 
+pub struct StateHandler { + state_snapshot_repository: RwLock, +} + +impl StateHandler { + pub fn new(state_snapshot_repository: Repository) -> Self { + StateHandler { state_snapshot_repository: RwLock::new(state_snapshot_repository) } + } +} + +impl HandleState for StateHandler +where + Repository: VersionedStateAccess, +{ + type WriteLockPayload = Repository; + type StateT = Repository::StateType; + type HashType = Repository::HashType; + + fn initialize_shard(&self, shard: ShardIdentifier) -> Result { + let mut state_write_lock = + self.state_snapshot_repository.write().map_err(|_| Error::LockPoisoning)?; + state_write_lock.initialize_new_shard(shard) + } + + fn load(&self, shard: &ShardIdentifier) -> Result { + self.state_snapshot_repository + .read() + .map_err(|_| Error::LockPoisoning)? + .load_latest(shard) + } + + fn load_for_mutation( + &self, + shard: &ShardIdentifier, + ) -> Result<(RwLockWriteGuard<'_, Self::WriteLockPayload>, Self::StateT)> { + let state_write_lock = + self.state_snapshot_repository.write().map_err(|_| Error::LockPoisoning)?; + let loaded_state = state_write_lock.load_latest(shard)?; + Ok((state_write_lock, loaded_state)) + } + + fn write_after_mutation( + &self, + state: Self::StateT, + mut state_lock: RwLockWriteGuard<'_, Self::WriteLockPayload>, + shard: &ShardIdentifier, + ) -> Result { + state_lock.update(shard, state) + } + + fn reset(&self, state: Self::StateT, shard: &ShardIdentifier) -> Result { + let mut state_write_lock = + self.state_snapshot_repository.write().map_err(|_| Error::LockPoisoning)?; + + state_write_lock.update(shard, state) + } +} + +impl QueryShardState for StateHandler +where + Repository: VersionedStateAccess, +{ + fn shard_exists(&self, shard: &ShardIdentifier) -> Result { + let registry_lock = + self.state_snapshot_repository.read().map_err(|_| Error::LockPoisoning)?; + + Ok(registry_lock.shard_exists(shard)) + } + + fn list_shards(&self) -> Result> { + self.state_snapshot_repository + .read() + .map_err(|_| Error::LockPoisoning)? 
+ .list_shards() + } +} + +#[cfg(test)] +mod tests { + + use super::*; + use crate::test::mocks::versioned_state_access_mock::VersionedStateAccessMock; + use std::{ + collections::{HashMap, VecDeque}, + sync::Arc, + thread, + }; + + type TestState = u64; + type TestHash = u64; + type TestStateRepository = VersionedStateAccessMock; + type TestStateHandler = StateHandler; + + #[test] + fn load_for_mutation_blocks_any_concurrent_access() { + let shard_id = ShardIdentifier::random(); + let state_handler = default_state_handler(&shard_id); + + let (lock, _s) = state_handler.load_for_mutation(&shard_id).unwrap(); + let new_state = 4u64; + + let state_handler_clone = state_handler.clone(); + let join_handle = thread::spawn(move || { + let latest_state = state_handler_clone.load(&shard_id).unwrap(); + assert_eq!(new_state, latest_state); + }); + + let _hash = state_handler.write_after_mutation(new_state, lock, &shard_id).unwrap(); + + join_handle.join().unwrap(); + } + + #[test] + fn load_initialized_works() { + let shard_id = ShardIdentifier::random(); + let state_handler = default_state_handler(&shard_id); + assert!(state_handler.load(&shard_id).is_ok()); + assert!(state_handler.load(&ShardIdentifier::random()).is_err()); + } + + #[test] + fn list_shards_works() { + let shard_id = ShardIdentifier::random(); + let state_handler = default_state_handler(&shard_id); + assert!(state_handler.list_shards().is_ok()); + } + + #[test] + fn shard_exists_works() { + let shard_id = ShardIdentifier::random(); + let state_handler = default_state_handler(&shard_id); + assert!(state_handler.shard_exists(&shard_id).unwrap()); + assert!(!state_handler.shard_exists(&ShardIdentifier::random()).unwrap()); + } + + fn default_state_handler(shard: &ShardIdentifier) -> Arc { + Arc::new(TestStateHandler::new(default_repository(shard))) + } + + fn default_repository(shard: &ShardIdentifier) -> TestStateRepository { + TestStateRepository::new(HashMap::from([(*shard, VecDeque::from([1, 2, 3]))])) + } +} diff --git a/core-primitives/stf-state-handler/src/state_key_repository.rs b/core-primitives/stf-state-handler/src/state_key_repository.rs new file mode 100644 index 0000000000..4ee6b8c46f --- /dev/null +++ b/core-primitives/stf-state-handler/src/state_key_repository.rs @@ -0,0 +1,96 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +*/ + +#[cfg(feature = "sgx")] +use std::sync::SgxRwLock as RwLock; + +#[cfg(feature = "std")] +use std::sync::RwLock; + +use crate::error::{Error, Result}; +use itp_sgx_crypto::StateCrypto; +use itp_sgx_io::SealedIO; +use std::sync::Arc; + +pub trait AccessStateKey { + type KeyType: StateCrypto; + + fn retrieve_key(&self) -> Result; +} + +pub trait MutateStateKey { + fn update_key(&self, key: KeyType) -> Result<()>; +} + +pub struct StateKeyRepository { + key_lock: RwLock, + sealed_io: Arc, +} + +impl StateKeyRepository { + pub fn new(key: KeyType, sealed_io: Arc) -> Self { + StateKeyRepository { key_lock: RwLock::new(key), sealed_io } + } +} + +impl AccessStateKey for StateKeyRepository +where + KeyType: StateCrypto + Clone, +{ + type KeyType = KeyType; + + fn retrieve_key(&self) -> Result { + self.key_lock.read().map_err(|_| Error::LockPoisoning).map(|l| l.clone()) + } +} + +impl MutateStateKey for StateKeyRepository +where + KeyType: StateCrypto, + SealedIo: SealedIO, +{ + fn update_key(&self, key: KeyType) -> Result<()> { + let mut key_lock = self.key_lock.write().map_err(|_| Error::LockPoisoning)?; + + self.sealed_io.seal(key)?; + *key_lock = self.sealed_io.unseal()?; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use itp_sgx_crypto::{aes::Aes, mocks::AesSealMock}; + + type TestKeyRepository = StateKeyRepository; + + #[test] + fn update_and_retrieve_key_works() { + let seal_mock = Arc::new(AesSealMock::default()); + let key_repository = TestKeyRepository::new(seal_mock.unseal().unwrap(), seal_mock.clone()); + + assert_eq!(seal_mock.unseal().unwrap(), key_repository.retrieve_key().unwrap()); + + let updated_key = Aes::new([2u8; 16], [0u8; 16]); + key_repository.update_key(updated_key).unwrap(); + + assert_eq!(updated_key, key_repository.retrieve_key().unwrap()); + assert_eq!(updated_key, seal_mock.unseal().unwrap()); + } +} diff --git a/core-primitives/stf-state-handler/src/state_snapshot_primitives.rs b/core-primitives/stf-state-handler/src/state_snapshot_primitives.rs new file mode 100644 index 0000000000..cd464b7201 --- /dev/null +++ b/core-primitives/stf-state-handler/src/state_snapshot_primitives.rs @@ -0,0 +1,55 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{error::Result, file_io::StateFileIo}; +use itp_time_utils::now_as_nanos; +use itp_types::ShardIdentifier; +use std::collections::{HashMap, VecDeque}; + +pub type StateId = u128; + +pub(crate) type SnapshotHistory = + HashMap>>; + +/// Internal wrapper for a state hash and state ID. 
+#[derive(Clone)]
+pub(crate) struct StateSnapshotMetaData<HashType> {
+	pub(crate) state_hash: HashType,
+	pub(crate) state_id: StateId,
+}
+
+impl<HashType> StateSnapshotMetaData<HashType> {
+	pub fn new(state_hash: HashType, state_id: StateId) -> Self {
+		StateSnapshotMetaData { state_hash, state_id }
+	}
+}
+
+pub(crate) fn initialize_shard_with_snapshot<HashType, FileIo>(
+	shard_identifier: &ShardIdentifier,
+	file_io: &FileIo,
+) -> Result<StateSnapshotMetaData<HashType>>
+where
+	FileIo: StateFileIo<HashType = HashType>,
+{
+	let state_id = generate_current_timestamp_state_id();
+	let state_hash = file_io.create_initialized(shard_identifier, state_id)?;
+	Ok(StateSnapshotMetaData::new(state_hash, state_id))
+}
+
+pub(crate) fn generate_current_timestamp_state_id() -> StateId {
+	now_as_nanos()
+}
diff --git a/core-primitives/stf-state-handler/src/state_snapshot_repository.rs b/core-primitives/stf-state-handler/src/state_snapshot_repository.rs
new file mode 100644
index 0000000000..6239aed50d
--- /dev/null
+++ b/core-primitives/stf-state-handler/src/state_snapshot_repository.rs
@@ -0,0 +1,443 @@
+/*
+	Copyright 2021 Integritee AG and Supercomputing Systems AG
+
+	Licensed under the Apache License, Version 2.0 (the "License");
+	you may not use this file except in compliance with the License.
+	You may obtain a copy of the License at
+
+		http://www.apache.org/licenses/LICENSE-2.0
+
+	Unless required by applicable law or agreed to in writing, software
+	distributed under the License is distributed on an "AS IS" BASIS,
+	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+	See the License for the specific language governing permissions and
+	limitations under the License.
+
+*/
+
+use crate::{
+	error::{Error, Result},
+	file_io::StateFileIo,
+	state_snapshot_primitives::{
+		generate_current_timestamp_state_id, initialize_shard_with_snapshot, SnapshotHistory,
+		StateId, StateSnapshotMetaData,
+	},
+};
+use core::ops::RangeBounds;
+use itp_types::ShardIdentifier;
+use log::*;
+use std::{collections::VecDeque, fmt::Debug, format, marker::PhantomData, sync::Arc, vec::Vec};
+
+/// Trait for versioned state access. Manages history of state snapshots.
+pub trait VersionedStateAccess {
+	type StateType;
+	type HashType;
+
+	/// Load the latest version of the state.
+	fn load_latest(&self, shard_identifier: &ShardIdentifier) -> Result<Self::StateType>;
+
+	/// Update the state, returning the hash of the state.
+	fn update(
+		&mut self,
+		shard_identifier: &ShardIdentifier,
+		state: Self::StateType,
+	) -> Result<Self::HashType>;
+
+	/// Reverts the state of a given shard to a state version identified by a state hash.
+	fn revert_to(
+		&mut self,
+		shard_identifier: &ShardIdentifier,
+		state_hash: &Self::HashType,
+	) -> Result<Self::StateType>;
+
+	/// Initialize a new shard.
+	fn initialize_new_shard(&mut self, shard_identifier: ShardIdentifier)
+		-> Result<Self::HashType>;
+
+	/// Checks if a shard for a given identifier exists.
+	fn shard_exists(&self, shard_identifier: &ShardIdentifier) -> bool;
+
+	/// Lists all shards.
+	fn list_shards(&self) -> Result<Vec<ShardIdentifier>>;
+}
+
+/// State snapshot repository.
+///
+/// Keeps versions of state snapshots, cycles them in a fixed-size circular buffer.
+/// Creates a state snapshot for each write/update operation. Allows reverting to a specific snapshot,
+/// identified by a state hash. Snapshot file names include a timestamp so they are unique.
+pub struct StateSnapshotRepository { + file_io: Arc, + snapshot_history_cache_size: usize, + snapshot_history: SnapshotHistory, + phantom_data: PhantomData, +} + +impl StateSnapshotRepository +where + FileIo: StateFileIo, + HashType: Copy + Eq + Debug, +{ + /// Constructor, initialized with no shards or snapshot history. + pub fn empty(file_io: Arc, snapshot_history_cache_size: usize) -> Result { + Self::new(file_io, snapshot_history_cache_size, SnapshotHistory::default()) + } + + /// Constructor to initialize the repository with shards and snapshot history. + /// + /// Crate private, to be used by the loader. + pub(crate) fn new( + file_io: Arc, + snapshot_history_cache_size: usize, + snapshot_history: SnapshotHistory, + ) -> Result { + if snapshot_history_cache_size == 0usize { + return Err(Error::ZeroCacheSize) + } + + Ok(StateSnapshotRepository { + file_io, + snapshot_history_cache_size, + snapshot_history, + phantom_data: Default::default(), + }) + } + + fn get_snapshot_history_mut( + &mut self, + shard_identifier: &ShardIdentifier, + ) -> Result<&mut VecDeque>> { + self.snapshot_history + .get_mut(shard_identifier) + .ok_or_else(|| Error::InvalidShard(*shard_identifier)) + } + + fn get_snapshot_history( + &self, + shard_identifier: &ShardIdentifier, + ) -> Result<&VecDeque>> { + self.snapshot_history + .get(shard_identifier) + .ok_or_else(|| Error::InvalidShard(*shard_identifier)) + } + + fn get_latest_snapshot_metadata( + &self, + shard_identifier: &ShardIdentifier, + ) -> Result<&StateSnapshotMetaData> { + let snapshot_history = self.get_snapshot_history(shard_identifier)?; + snapshot_history.front().ok_or(Error::EmptyRepository) + } + + fn prune_snapshot_history_by_range>( + &mut self, + shard_identifier: &ShardIdentifier, + range: R, + ) -> Result<()> { + let state_snapshots_to_remove = self + .get_snapshot_history_mut(shard_identifier)? + .drain(range) + .collect::>(); + + self.remove_snapshots(shard_identifier, state_snapshots_to_remove.as_slice()); + Ok(()) + } + + /// Remove snapshots referenced by metadata. + /// Does not stop on error, it's guaranteed to call `remove` on all elements. + /// Logs any errors that occur. + fn remove_snapshots( + &self, + shard_identifier: &ShardIdentifier, + snapshots_metadata: &[StateSnapshotMetaData], + ) { + for snapshot_metadata in snapshots_metadata { + if let Err(e) = self.file_io.remove(shard_identifier, snapshot_metadata.state_id) { + // We just log an error, don't want to return the error here, because the operation + // in general was successful, just a side-effect that failed. 
+ error!("Failed to remove state, with id '{}': {:?}", snapshot_metadata.state_id, e); + } + } + } + + fn write_new_state( + &self, + shard_identifier: &ShardIdentifier, + state: State, + ) -> Result<(HashType, StateId)> { + let state_id = generate_current_timestamp_state_id(); + let state_hash = self.file_io.write(shard_identifier, state_id, state)?; + Ok((state_hash, state_id)) + } + + fn load_state( + &self, + shard_identifier: &ShardIdentifier, + snapshot_metadata: &StateSnapshotMetaData, + ) -> Result { + self.file_io.load(shard_identifier, snapshot_metadata.state_id) + } +} + +impl VersionedStateAccess + for StateSnapshotRepository +where + FileIo: StateFileIo, + HashType: Copy + Eq + Debug, +{ + type StateType = State; + type HashType = HashType; + + fn load_latest(&self, shard_identifier: &ShardIdentifier) -> Result { + let latest_snapshot_metadata = self.get_latest_snapshot_metadata(shard_identifier)?; + self.file_io.load(shard_identifier, latest_snapshot_metadata.state_id) + } + + fn update( + &mut self, + shard_identifier: &ShardIdentifier, + state: Self::StateType, + ) -> Result { + if !self.shard_exists(shard_identifier) { + return Err(Error::InvalidShard(*shard_identifier)) + } + + let (state_hash, state_id) = self.write_new_state(shard_identifier, state)?; + let cache_size = self.snapshot_history_cache_size; + + let snapshot_history = self.get_snapshot_history_mut(shard_identifier)?; + snapshot_history.push_front(StateSnapshotMetaData::new(state_hash, state_id)); + + // In case we're above max queue size we remove the oldest entries and corresponding files + if snapshot_history.len() > cache_size { + self.prune_snapshot_history_by_range(shard_identifier, cache_size..)?; + } + + Ok(state_hash) + } + + fn revert_to( + &mut self, + shard_identifier: &ShardIdentifier, + state_hash: &Self::HashType, + ) -> Result { + let snapshot_history = self.get_snapshot_history(shard_identifier)?; + + // We use `position()` instead of `find()`, because it then allows us to easily drain + // all the newer states. + let snapshot_metadata_index = snapshot_history + .iter() + .position(|fmd| fmd.state_hash == *state_hash) + .ok_or_else(|| Error::StateNotFoundInRepository(format!("{:?}", state_hash)))?; + + // Should never fail, since we got the index from above, with `position()`. 
+ let snapshot_metadata = snapshot_history + .get(snapshot_metadata_index) + .ok_or_else(|| Error::StateNotFoundInRepository(format!("{:?}", state_hash)))?; + + let state = self.load_state(shard_identifier, snapshot_metadata)?; + + // Remove any state versions newer than the one we're resetting to + // (do this irreversible operation last, to ensure the loading has succeeded) + self.prune_snapshot_history_by_range(shard_identifier, ..snapshot_metadata_index)?; + + Ok(state) + } + + fn initialize_new_shard( + &mut self, + shard_identifier: ShardIdentifier, + ) -> Result { + if let Some(state_snapshots) = self.snapshot_history.get(&shard_identifier) { + warn!("Shard ({:?}) already exists, will not initialize again", shard_identifier); + return state_snapshots.front().map(|s| s.state_hash).ok_or(Error::EmptyRepository) + } + + let snapshot_metadata = + initialize_shard_with_snapshot(&shard_identifier, self.file_io.as_ref())?; + + let state_hash = snapshot_metadata.state_hash; + self.snapshot_history + .insert(shard_identifier, VecDeque::from([snapshot_metadata])); + Ok(state_hash) + } + + fn shard_exists(&self, shard_identifier: &ShardIdentifier) -> bool { + self.snapshot_history.get(shard_identifier).is_some() + } + + fn list_shards(&self) -> Result> { + Ok(self.snapshot_history.keys().cloned().collect()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + in_memory_state_file_io::InMemoryStateFileIo, + state_snapshot_repository_loader::StateSnapshotRepositoryLoader, + }; + use std::{collections::hash_map::DefaultHasher, vec}; + + type TestState = u64; + type TestStateHash = u64; + type TestFileIo = InMemoryStateFileIo; + type TestSnapshotRepository = StateSnapshotRepository; + + const TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE: usize = 3; + + #[test] + fn new_with_zero_cache_size_returns_error() { + let shards = + vec![ShardIdentifier::random(), ShardIdentifier::random(), ShardIdentifier::random()]; + let file_io = create_test_file_io(shards.as_slice()); + + assert!(TestSnapshotRepository::empty(file_io.clone(), 0usize).is_err()); + } + + #[test] + fn upon_new_all_shards_are_initialized() { + let shards = + vec![ShardIdentifier::random(), ShardIdentifier::random(), ShardIdentifier::random()]; + let (file_io, state_snapshot_repository) = create_state_snapshot_repository( + shards.as_slice(), + TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE, + ); + + assert_eq!(shards.len(), file_io.list_shards().unwrap().len()); + assert_eq!(shards.len(), state_snapshot_repository.snapshot_history.len()); + assert_eq!(shards.len(), state_snapshot_repository.list_shards().unwrap().len()); + for states_per_shard in state_snapshot_repository.snapshot_history.values() { + assert_eq!(1, states_per_shard.len()); + } + for shard in shards { + assert!(state_snapshot_repository.load_latest(&shard).is_ok()); + assert!(state_snapshot_repository.shard_exists(&shard)); + } + } + + #[test] + fn update_latest_creates_new_state_file() { + let shards = + vec![ShardIdentifier::random(), ShardIdentifier::random(), ShardIdentifier::random()]; + let (file_io, mut state_snapshot_repository) = create_state_snapshot_repository( + shards.as_slice(), + TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE, + ); + + let shard_to_update = shards.get(1).unwrap(); + assert_eq!(1, file_io.get_states_for_shard(shard_to_update).unwrap().len()); + + let new_state = 1234u64; + + let _ = state_snapshot_repository.update(shard_to_update, new_state).unwrap(); + + let snapshot_history = + state_snapshot_repository.snapshot_history.get(shard_to_update).unwrap(); + 
assert_eq!(2, snapshot_history.len()); + assert_eq!(new_state, state_snapshot_repository.load_latest(shard_to_update).unwrap()); + assert_eq!(2, file_io.get_states_for_shard(shard_to_update).unwrap().len()); + } + + #[test] + fn update_latest_prunes_states_when_above_cache_size() { + let shard_id = ShardIdentifier::random(); + let (file_io, mut state_snapshot_repository) = + create_state_snapshot_repository(&[shard_id], TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE); + + let states = vec![1u64, 2u64, 3u64, 4u64, 5u64, 6u64]; + assert!(states.len() > TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE); // ensures we have pruning + + states.iter().for_each(|state| { + let _ = state_snapshot_repository.update(&shard_id, *state).unwrap(); + }); + + let snapshot_history = state_snapshot_repository.snapshot_history.get(&shard_id).unwrap(); + assert_eq!(TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE, snapshot_history.len()); + assert_eq!( + *states.last().unwrap(), + state_snapshot_repository.load_latest(&shard_id).unwrap() + ); + assert_eq!( + TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE, + file_io.get_states_for_shard(&shard_id).unwrap().len() + ); + } + + #[test] + fn update_latest_with_invalid_shard_returns_error_without_modification() { + let shard_id = ShardIdentifier::random(); + let (file_io, mut state_snapshot_repository) = + create_state_snapshot_repository(&[shard_id], TEST_SNAPSHOT_REPOSITORY_CACHE_SIZE); + + assert!(state_snapshot_repository.update(&ShardIdentifier::random(), 45).is_err()); + + let snapshot_history = state_snapshot_repository.snapshot_history.get(&shard_id).unwrap(); + assert_eq!(1, snapshot_history.len()); + assert_eq!(0u64, state_snapshot_repository.load_latest(&shard_id).unwrap()); + assert_eq!(1, file_io.get_states_for_shard(&shard_id).unwrap().len()); + } + + #[test] + fn revert_to_removes_version_newer_than_target_hash() { + let shard_id = ShardIdentifier::random(); + let (file_io, mut state_snapshot_repository) = + create_state_snapshot_repository(&[shard_id], 6); + + let states = vec![1u64, 2u64, 3u64, 4u64, 5u64]; + + let state_hashes = states + .iter() + .map(|state| state_snapshot_repository.update(&shard_id, *state).unwrap()) + .collect::>(); + let revert_target_hash = state_hashes.get(1).unwrap(); + + let reverted_state = + state_snapshot_repository.revert_to(&shard_id, revert_target_hash).unwrap(); + + assert_eq!(2u64, reverted_state); + assert_eq!(3, state_snapshot_repository.snapshot_history.get(&shard_id).unwrap().len()); // because we have initialized version '0' as well + assert_eq!(2u64, state_snapshot_repository.load_latest(&shard_id).unwrap()); + assert_eq!(3, file_io.get_states_for_shard(&shard_id).unwrap().len()); + } + + #[test] + fn initializing_new_shard_works() { + let (_, mut state_snapshot_repository) = create_state_snapshot_repository(&[], 2); + + let shard_id = ShardIdentifier::random(); + + assert!(state_snapshot_repository.load_latest(&shard_id).is_err()); + assert!(state_snapshot_repository.list_shards().unwrap().is_empty()); + + let _hash = state_snapshot_repository.initialize_new_shard(shard_id).unwrap(); + + assert!(state_snapshot_repository.load_latest(&shard_id).is_ok()); + assert_eq!(1, state_snapshot_repository.list_shards().unwrap().len()); + } + + #[test] + fn initialize_new_state_when_shard_already_exists_returns_ok() { + let shard_id = ShardIdentifier::random(); + let (_, mut state_snapshot_repository) = create_state_snapshot_repository(&[shard_id], 2); + + let _hash = state_snapshot_repository.initialize_new_shard(shard_id).unwrap(); + + 
assert!(state_snapshot_repository.load_latest(&shard_id).is_ok()); + assert_eq!(1, state_snapshot_repository.list_shards().unwrap().len()); + } + + fn create_state_snapshot_repository( + shards: &[ShardIdentifier], + snapshot_history_size: usize, + ) -> (Arc, TestSnapshotRepository) { + let file_io = create_test_file_io(shards); + let repository_loader = StateSnapshotRepositoryLoader::new(file_io.clone()); + (file_io, repository_loader.load_snapshot_repository(snapshot_history_size).unwrap()) + } + + fn create_test_file_io(shards: &[ShardIdentifier]) -> Arc { + Arc::new(TestFileIo::new(DefaultHasher::default(), shards)) + } +} diff --git a/core-primitives/stf-state-handler/src/state_snapshot_repository_loader.rs b/core-primitives/stf-state-handler/src/state_snapshot_repository_loader.rs new file mode 100644 index 0000000000..729dbbc2f8 --- /dev/null +++ b/core-primitives/stf-state-handler/src/state_snapshot_repository_loader.rs @@ -0,0 +1,202 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{ + error::Result, + file_io::StateFileIo, + state_snapshot_primitives::{ + initialize_shard_with_snapshot, SnapshotHistory, StateId, StateSnapshotMetaData, + }, + state_snapshot_repository::StateSnapshotRepository, +}; +use itp_types::ShardIdentifier; +use log::*; +use std::{ + collections::VecDeque, fmt::Debug, iter::FromIterator, marker::PhantomData, sync::Arc, vec::Vec, +}; + +/// Loads a state snapshot repository from existing shards directory with state files. +pub struct StateSnapshotRepositoryLoader { + file_io: Arc, + phantom_data: PhantomData<(State, HashType)>, +} + +impl StateSnapshotRepositoryLoader +where + FileIo: StateFileIo, + HashType: Copy + Eq + Debug, +{ + pub fn new(file_io: Arc) -> Self { + StateSnapshotRepositoryLoader { file_io, phantom_data: Default::default() } + } + + /// Load a state snapshot repository from an existing set of files and directories. + pub fn load_snapshot_repository( + &self, + snapshot_history_cache_size: usize, + ) -> Result> { + let snapshot_history = self.load_and_initialize_state_snapshot_history()?; + + StateSnapshotRepository::new( + self.file_io.clone(), + snapshot_history_cache_size, + snapshot_history, + ) + } + + fn load_and_initialize_state_snapshot_history(&self) -> Result> { + let mut repository = SnapshotHistory::new(); + + let shards = self.file_io.list_shards()?; + debug!("Found {} shard(s) to load state from", shards.len()); + + for shard in shards { + let mut state_ids = self.file_io.list_state_ids_for_shard(&shard)?; + // Sort by id (which are timestamp), highest, i.e. 
newest, first + state_ids.sort_unstable(); + state_ids.reverse(); + + let mut snapshot_metadata: Vec<_> = self.map_to_snapshot_metadata(&shard, state_ids); + + if snapshot_metadata.is_empty() { + warn!( + "No (valid) states found for shard {:?}, initializing empty shard state", + shard + ); + let initial_snapshot_metadata = + initialize_shard_with_snapshot(&shard, self.file_io.as_ref())?; + snapshot_metadata.push(initial_snapshot_metadata); + } else { + debug!( + "Found {} state snapshot(s) for shard {}, latest snapshot is {}", + snapshot_metadata.len(), + &shard, + snapshot_metadata.first().map(|f| f.state_id).unwrap_or_default() + ); + } + + let snapshot_history = VecDeque::from_iter(snapshot_metadata); + + repository.insert(shard, snapshot_history); + } + Ok(repository) + } + + fn map_to_snapshot_metadata( + &self, + shard: &ShardIdentifier, + state_ids: Vec, + ) -> Vec> { + state_ids + .into_iter() + .flat_map(|state_id| match self.file_io.compute_hash(shard, state_id) { + Ok(hash) => Some(StateSnapshotMetaData::new(hash, state_id)), + Err(e) => { + warn!( + "Failed to compute hash for state snapshot with id {}: {:?}, ignoring snapshot as a result", + state_id, e + ); + None + }, + }) + .collect() + } +} + +#[cfg(test)] +mod tests { + + use super::*; + use crate::in_memory_state_file_io::InMemoryStateFileIo; + use std::collections::hash_map::DefaultHasher; + + type TestState = u64; + type TestStateHash = u64; + type TestFileIo = InMemoryStateFileIo; + type TestLoader = StateSnapshotRepositoryLoader; + + #[test] + fn loading_from_empty_shard_directories_initializes_files() { + let shards = + vec![ShardIdentifier::random(), ShardIdentifier::random(), ShardIdentifier::random()]; + let (_, loader) = create_test_fixtures(shards.as_slice()); + + let snapshot_history = loader.load_and_initialize_state_snapshot_history().unwrap(); + assert_eq!(shards.len(), snapshot_history.len()); + for snapshots in snapshot_history.values() { + assert_eq!(1, snapshots.len()); + } + } + + #[test] + fn loading_without_shards_returns_empty_directory() { + let (_, loader) = create_test_fixtures(&[]); + + let snapshot_history = loader.load_and_initialize_state_snapshot_history().unwrap(); + assert!(snapshot_history.is_empty()); + } + + #[test] + fn loading_from_files_orders_by_timestamp() { + let shards = + vec![ShardIdentifier::random(), ShardIdentifier::random(), ShardIdentifier::random()]; + let (file_io, loader) = create_test_fixtures(shards.as_slice()); + + add_state_snapshots( + file_io.as_ref(), + &shards[0], + &[1_000_000, 2_000_000, 3_000_000, 4_000_000], + ); + add_state_snapshots(file_io.as_ref(), &shards[1], &[10_000_000, 9_000_000]); + add_state_snapshots(file_io.as_ref(), &shards[2], &[14_000_000, 11_000_000, 12_000_000]); + + let snapshot_history = loader.load_and_initialize_state_snapshot_history().unwrap(); + + assert_eq!(shards.len(), snapshot_history.len()); + assert_latest_state_id(&snapshot_history, &shards[0], 4_000_000); + assert_latest_state_id(&snapshot_history, &shards[1], 10_000_000); + assert_latest_state_id(&snapshot_history, &shards[2], 14_000_000); + } + + fn add_state_snapshots(file_io: &TestFileIo, shard: &ShardIdentifier, state_ids: &[StateId]) { + for state_id in state_ids { + add_snapshot_with_state_ids(file_io, shard, *state_id); + } + } + + fn add_snapshot_with_state_ids( + file_io: &TestFileIo, + shard: &ShardIdentifier, + state_id: StateId, + ) { + file_io.create_initialized(shard, state_id).unwrap(); + } + + fn assert_latest_state_id( + snapshot_history: &SnapshotHistory, 
+ shard: &ShardIdentifier, + state_id: StateId, + ) { + assert_eq!(snapshot_history.get(shard).unwrap().front().unwrap().state_id, state_id) + } + + fn create_test_fixtures(shards: &[ShardIdentifier]) -> (Arc, TestLoader) { + let file_io = Arc::new(TestFileIo::new(DefaultHasher::default(), shards)); + let loader = StateSnapshotRepositoryLoader::new(file_io.clone()); + (file_io, loader) + } +} diff --git a/core-primitives/stf-state-handler/src/test/mocks/mod.rs b/core-primitives/stf-state-handler/src/test/mocks/mod.rs new file mode 100644 index 0000000000..400f2530c7 --- /dev/null +++ b/core-primitives/stf-state-handler/src/test/mocks/mod.rs @@ -0,0 +1,19 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +pub mod state_key_repository_mock; +pub mod versioned_state_access_mock; diff --git a/core-primitives/stf-state-handler/src/test/mocks/state_key_repository_mock.rs b/core-primitives/stf-state-handler/src/test/mocks/state_key_repository_mock.rs new file mode 100644 index 0000000000..d1b958c8ac --- /dev/null +++ b/core-primitives/stf-state-handler/src/test/mocks/state_key_repository_mock.rs @@ -0,0 +1,68 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +*/ + +#[cfg(feature = "sgx")] +use std::sync::SgxRwLock as RwLock; + +#[cfg(feature = "std")] +use std::sync::RwLock; + +use crate::{ + error::Result, + state_key_repository::{AccessStateKey, MutateStateKey}, +}; +use itp_sgx_crypto::StateCrypto; + +#[derive(Default)] +pub struct StateKeyRepositoryMock +where + KeyType: StateCrypto + Clone + Default, +{ + key: RwLock, +} + +impl StateKeyRepositoryMock +where + KeyType: StateCrypto + Clone + Default, +{ + #[cfg(all(feature = "test", feature = "sgx"))] + pub fn new(key: KeyType) -> Self { + StateKeyRepositoryMock { key: RwLock::new(key) } + } +} + +impl AccessStateKey for StateKeyRepositoryMock +where + KeyType: StateCrypto + Clone + Default, +{ + type KeyType = KeyType; + + fn retrieve_key(&self) -> Result { + Ok(self.key.read().unwrap().clone()) + } +} + +impl MutateStateKey for StateKeyRepositoryMock +where + KeyType: StateCrypto + Clone + Default, +{ + fn update_key(&self, key: KeyType) -> Result<()> { + let mut lock = self.key.write().unwrap(); + *lock = key; + Ok(()) + } +} diff --git a/core-primitives/stf-state-handler/src/test/mocks/versioned_state_access_mock.rs b/core-primitives/stf-state-handler/src/test/mocks/versioned_state_access_mock.rs new file mode 100644 index 0000000000..65294db6e6 --- /dev/null +++ b/core-primitives/stf-state-handler/src/test/mocks/versioned_state_access_mock.rs @@ -0,0 +1,100 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{ + error::{Error, Result}, + state_snapshot_repository::VersionedStateAccess, +}; +use itp_types::ShardIdentifier; +use std::{ + collections::{HashMap, VecDeque}, + marker::PhantomData, + string::ToString, + vec::Vec, +}; + +#[derive(Default, Clone)] +pub struct VersionedStateAccessMock { + state_history: HashMap>, + phantom_data: PhantomData, +} + +impl VersionedStateAccessMock { + #[cfg(test)] + pub fn new(state_history: HashMap>) -> Self { + VersionedStateAccessMock { state_history, phantom_data: Default::default() } + } +} + +impl VersionedStateAccess for VersionedStateAccessMock +where + State: Default + Clone, + Hash: Default, +{ + type StateType = State; + type HashType = Hash; + + fn load_latest(&self, shard_identifier: &ShardIdentifier) -> Result { + self.state_history + .get(shard_identifier) + .ok_or(Error::InvalidShard(*shard_identifier))? 
+ .front() + .cloned() + .ok_or(Error::StateNotFoundInRepository("".to_string())) + } + + fn update( + &mut self, + shard_identifier: &ShardIdentifier, + state: Self::StateType, + ) -> Result { + let state_history = self + .state_history + .entry(*shard_identifier) + .or_insert_with(|| VecDeque::default()); + state_history.push_front(state); + Ok(Hash::default()) + } + + fn revert_to( + &mut self, + shard_identifier: &ShardIdentifier, + _state_hash: &Self::HashType, + ) -> Result { + let state_history = self + .state_history + .get_mut(shard_identifier) + .ok_or_else(|| Error::InvalidShard(*shard_identifier))?; + state_history.drain(..).last().ok_or(Error::EmptyRepository) + } + + fn initialize_new_shard( + &mut self, + shard_identifier: ShardIdentifier, + ) -> Result { + self.state_history.insert(shard_identifier, VecDeque::from([State::default()])); + Ok(Hash::default()) + } + + fn shard_exists(&self, shard_identifier: &ShardIdentifier) -> bool { + self.state_history.get(shard_identifier).is_some() + } + + fn list_shards(&self) -> Result> { + Ok(self.state_history.keys().copied().collect()) + } +} diff --git a/core-primitives/stf-state-handler/src/test/mod.rs b/core-primitives/stf-state-handler/src/test/mod.rs new file mode 100644 index 0000000000..e3552cd37f --- /dev/null +++ b/core-primitives/stf-state-handler/src/test/mod.rs @@ -0,0 +1,25 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(test)] +pub(crate) mod mocks; + +#[cfg(all(feature = "test", feature = "sgx"))] +pub mod mocks; + +#[cfg(all(feature = "test", feature = "sgx"))] +pub mod sgx_tests; diff --git a/core-primitives/stf-state-handler/src/test/sgx_tests.rs b/core-primitives/stf-state-handler/src/test/sgx_tests.rs new file mode 100644 index 0000000000..fb9c9c3ab5 --- /dev/null +++ b/core-primitives/stf-state-handler/src/test/sgx_tests.rs @@ -0,0 +1,340 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +*/ + +use crate::{ + error::{Error, Result}, + file_io::{ + purge_shard_dir, + sgx::{init_shard, shard_exists, SgxStateFileIo}, + shard_path, StateFileIo, + }, + handle_state::HandleState, + query_shard_state::QueryShardState, + state_handler::StateHandler, + state_snapshot_repository::StateSnapshotRepository, + state_snapshot_repository_loader::StateSnapshotRepositoryLoader, + test::mocks::state_key_repository_mock::StateKeyRepositoryMock, +}; +use codec::{Decode, Encode}; +use ita_stf::{State as StfState, StateType as StfStateType}; +use itp_sgx_crypto::{Aes, AesSeal, StateCrypto}; +use itp_sgx_io::{write, StaticSealedIO}; +use itp_types::{ShardIdentifier, H256}; +use sgx_externalities::SgxExternalitiesTrait; +use sp_core::hashing::blake2_256; +use std::{sync::Arc, thread, vec::Vec}; + +const STATE_SNAPSHOTS_CACHE_SIZE: usize = 3; + +type TestStateFileIo = SgxStateFileIo>; +type TestStateRepository = StateSnapshotRepository; +type TestStateRepositoryLoader = StateSnapshotRepositoryLoader; +type TestStateHandler = StateHandler; + +/// Directory handle to automatically initialize a directory +/// and upon dropping the reference, removing it again. +struct ShardDirectoryHandle { + shard: ShardIdentifier, +} + +impl ShardDirectoryHandle { + pub fn new(shard: ShardIdentifier) -> Result { + given_initialized_shard(&shard)?; + Ok(ShardDirectoryHandle { shard }) + } +} + +impl Drop for ShardDirectoryHandle { + fn drop(&mut self) { + purge_shard_dir(&self.shard) + } +} + +// Fixme: Move this test to sgx-runtime: +// +// https://github.com/integritee-network/sgx-runtime/issues/23 +pub fn test_sgx_state_decode_encode_works() { + // given + let state = given_hello_world_state(); + + // when + let encoded_state = state.state.encode(); + let state2 = StfStateType::decode(&mut encoded_state.as_slice()).unwrap(); + + // then + assert_eq!(state.state, state2); +} + +pub fn test_encrypt_decrypt_state_type_works() { + // given + let state = given_hello_world_state(); + let state_key = AesSeal::unseal_from_static_file().unwrap(); + + // when + let mut state_buffer = state.state.encode(); + state_key.encrypt(&mut state_buffer).unwrap(); + + state_key.decrypt(&mut state_buffer).unwrap(); + let decoded = StfStateType::decode(&mut state_buffer.as_slice()).unwrap(); + + // then + assert_eq!(state.state, decoded); +} + +pub fn test_write_and_load_state_works() { + // given + let shard: ShardIdentifier = [94u8; 32].into(); + let (state_handler, shard_dir_handle) = initialize_state_handler_with_directory_handle(&shard); + + let state = given_hello_world_state(); + + // when + let (lock, _s) = state_handler.load_for_mutation(&shard).unwrap(); + let _hash = state_handler.write_after_mutation(state.clone(), lock, &shard).unwrap(); + + let result = state_handler.load(&shard).unwrap(); + + // then + assert_eq!(state.state, result.state); + + // clean up + std::mem::drop(shard_dir_handle); +} + +pub fn test_ensure_subsequent_state_loads_have_same_hash() { + // given + let shard: ShardIdentifier = [49u8; 32].into(); + let (state_handler, shard_dir_handle) = initialize_state_handler_with_directory_handle(&shard); + + let (lock, initial_state) = state_handler.load_for_mutation(&shard).unwrap(); + state_handler.write_after_mutation(initial_state.clone(), lock, &shard).unwrap(); + + let state_loaded = state_handler.load(&shard).unwrap(); + + assert_eq!(hash_of(&initial_state.state), hash_of(&state_loaded.state)); + + // clean up + std::mem::drop(shard_dir_handle); +} + +fn hash_of(encodable: &T) -> H256 { + 
encodable.using_encoded(blake2_256).into() +} + +pub fn test_write_access_locks_read_until_finished() { + // here we want to test that a lock we obtain for + // mutating state locks out any read attempt that happens during that time + + // given + let shard: ShardIdentifier = [47u8; 32].into(); + let (state_handler, shard_dir_handle) = initialize_state_handler_with_directory_handle(&shard); + + let new_state_key = "my_new_state".encode(); + let (lock, mut state_to_mutate) = state_handler.load_for_mutation(&shard).unwrap(); + + // spawn a new thread that reads state + // this thread should be blocked until the write lock is released, i.e. until + // the new state is written. We can verify this, by trying to read that state variable + // that will be inserted further down below + let new_state_key_for_read = new_state_key.clone(); + let state_handler_clone = state_handler.clone(); + let shard_for_read = shard.clone(); + let join_handle = thread::spawn(move || { + let state_to_read = state_handler_clone.load(&shard_for_read).unwrap(); + assert!(state_to_read.get(new_state_key_for_read.as_slice()).is_some()); + }); + + assert!(state_to_mutate.get(new_state_key.clone().as_slice()).is_none()); + state_to_mutate.insert(new_state_key, "mega_secret_value".encode()); + + let _hash = state_handler.write_after_mutation(state_to_mutate, lock, &shard).unwrap(); + + join_handle.join().unwrap(); + + // clean up + std::mem::drop(shard_dir_handle); +} + +pub fn test_state_handler_file_backend_is_initialized() { + let shard: ShardIdentifier = [11u8; 32].into(); + let (state_handler, shard_dir_handle) = initialize_state_handler_with_directory_handle(&shard); + + assert!(state_handler.shard_exists(&shard).unwrap()); + assert!(1 <= state_handler.list_shards().unwrap().len()); // only greater equal, because there might be other (non-test) shards present + assert_eq!(1, number_of_files_in_shard_dir(&shard).unwrap()); // creates a first initialized file + + let _state = state_handler.load(&shard).unwrap(); + + assert_eq!(1, number_of_files_in_shard_dir(&shard).unwrap()); + + // clean up + std::mem::drop(shard_dir_handle); +} + +pub fn test_multiple_state_updates_create_snapshots_up_to_cache_size() { + let shard: ShardIdentifier = [17u8; 32].into(); + let (state_handler, _shard_dir_handle) = initialize_state_handler_with_directory_handle(&shard); + + assert_eq!(1, number_of_files_in_shard_dir(&shard).unwrap()); + + let hash_1 = update_state( + state_handler.as_ref(), + &shard, + ("my_key_1".encode(), "mega_secret_value".encode()), + ); + assert_eq!(2, number_of_files_in_shard_dir(&shard).unwrap()); + + let hash_2 = update_state( + state_handler.as_ref(), + &shard, + ("my_key_2".encode(), "mega_secret_value222".encode()), + ); + assert_eq!(3, number_of_files_in_shard_dir(&shard).unwrap()); + + let hash_3 = update_state( + state_handler.as_ref(), + &shard, + ("my_key_3".encode(), "mega_secret_value3".encode()), + ); + assert_eq!(3, number_of_files_in_shard_dir(&shard).unwrap()); + + let hash_4 = update_state( + state_handler.as_ref(), + &shard, + ("my_key_3".encode(), "mega_secret_valuenot3".encode()), + ); + assert_eq!(3, number_of_files_in_shard_dir(&shard).unwrap()); + + assert_ne!(hash_1, hash_2); + assert_ne!(hash_1, hash_3); + assert_ne!(hash_1, hash_4); + assert_ne!(hash_2, hash_3); + assert_ne!(hash_2, hash_4); + assert_ne!(hash_3, hash_4); + + assert_eq!(STATE_SNAPSHOTS_CACHE_SIZE, number_of_files_in_shard_dir(&shard).unwrap()); +} + +pub fn test_file_io_get_state_hash_works() { + let shard: ShardIdentifier 
= [21u8; 32].into(); + let _shard_dir_handle = ShardDirectoryHandle::new(shard).unwrap(); + let state_key_access = + Arc::new(StateKeyRepositoryMock::new(AesSeal::unseal_from_static_file().unwrap())); + + let file_io = TestStateFileIo::new(state_key_access); + + let state_id = 1234u128; + let state_hash = file_io.create_initialized(&shard, state_id).unwrap(); + assert_eq!(state_hash, file_io.compute_hash(&shard, state_id).unwrap()); + + let state_hash = file_io.write(&shard, state_id, given_hello_world_state()).unwrap(); + assert_eq!(state_hash, file_io.compute_hash(&shard, state_id).unwrap()); +} + +pub fn test_state_files_from_handler_can_be_loaded_again() { + let shard: ShardIdentifier = [15u8; 32].into(); + let (state_handler, _shard_dir_handle) = initialize_state_handler_with_directory_handle(&shard); + + update_state(state_handler.as_ref(), &shard, ("test_key_1".encode(), "value1".encode())); + update_state(state_handler.as_ref(), &shard, ("test_key_2".encode(), "value2".encode())); + update_state( + state_handler.as_ref(), + &shard, + ("test_key_2".encode(), "value2_updated".encode()), + ); + update_state(state_handler.as_ref(), &shard, ("test_key_3".encode(), "value3".encode())); + + // We initialize another state handler to load the state from the changes we just made. + let updated_state_handler = initialize_state_handler(); + + assert_eq!(STATE_SNAPSHOTS_CACHE_SIZE, number_of_files_in_shard_dir(&shard).unwrap()); + assert_eq!( + &"value3".encode(), + updated_state_handler + .load(&shard) + .unwrap() + .state() + .get("test_key_3".encode().as_slice()) + .unwrap() + ); +} + +pub fn test_list_state_ids_ignores_files_not_matching_the_pattern() { + let shard: ShardIdentifier = [21u8; 32].into(); + let _shard_dir_handle = ShardDirectoryHandle::new(shard).unwrap(); + let state_key_access = + Arc::new(StateKeyRepositoryMock::new(AesSeal::unseal_from_static_file().unwrap())); + + let file_io = TestStateFileIo::new(state_key_access); + + let mut invalid_state_file_path = shard_path(&shard); + invalid_state_file_path.push("invalid-state.bin"); + write(&[0, 1, 2, 3, 4, 5], invalid_state_file_path).unwrap(); + + file_io.create_initialized(&shard, 1234).unwrap(); + + assert_eq!(1, file_io.list_state_ids_for_shard(&shard).unwrap().len()); +} + +fn initialize_state_handler_with_directory_handle( + shard: &ShardIdentifier, +) -> (Arc, ShardDirectoryHandle) { + let shard_dir_handle = ShardDirectoryHandle::new(*shard).unwrap(); + (initialize_state_handler(), shard_dir_handle) +} + +fn initialize_state_handler() -> Arc { + let state_key_access = + Arc::new(StateKeyRepositoryMock::new(AesSeal::unseal_from_static_file().unwrap())); + let file_io = Arc::new(TestStateFileIo::new(state_key_access)); + let state_repository_loader = TestStateRepositoryLoader::new(file_io); + let state_snapshot_repository = state_repository_loader + .load_snapshot_repository(STATE_SNAPSHOTS_CACHE_SIZE) + .unwrap(); + Arc::new(TestStateHandler::new(state_snapshot_repository)) +} + +fn update_state( + state_handler: &TestStateHandler, + shard: &ShardIdentifier, + kv_pair: (Vec, Vec), +) -> H256 { + let (lock, mut state_to_mutate) = state_handler.load_for_mutation(shard).unwrap(); + state_to_mutate.insert(kv_pair.0, kv_pair.1); + state_handler.write_after_mutation(state_to_mutate, lock, shard).unwrap() +} + +fn given_hello_world_state() -> StfState { + let key: Vec = "hello".encode(); + let value: Vec = "world".encode(); + let mut state = StfState::new(); + state.insert(key, value); + state +} + +fn 
given_initialized_shard(shard: &ShardIdentifier) -> Result<()> { + if shard_exists(&shard) { + purge_shard_dir(shard); + } + init_shard(&shard) +} + +fn number_of_files_in_shard_dir(shard: &ShardIdentifier) -> Result { + let shard_dir_path = shard_path(shard); + let files_in_dir = std::fs::read_dir(shard_dir_path).map_err(|e| Error::Other(e.into()))?; + Ok(files_in_dir.count()) +} diff --git a/core-primitives/stf-state-handler/src/tests.rs b/core-primitives/stf-state-handler/src/tests.rs deleted file mode 100644 index dc7bbd588b..0000000000 --- a/core-primitives/stf-state-handler/src/tests.rs +++ /dev/null @@ -1,175 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -*/ - -#[cfg(all(not(feature = "std"), feature = "sgx"))] -use crate::sgx_reexport_prelude::*; - -use crate::{ - file_io::{encrypt, exists, init_shard, list_shards, load, write as state_write}, - global_file_state_handler::GlobalFileStateHandler, - handle_state::HandleState, -}; -use base58::ToBase58; -use codec::{Decode, Encode}; -use ita_stf::{State as StfState, StateType as StfStateType}; -use itp_settings::files::SHARDS_PATH; -use itp_types::{ShardIdentifier, H256}; -use sgx_externalities::SgxExternalitiesTrait; -use sp_core::hashing::blake2_256; -use std::{format, thread, vec::Vec}; - -// Fixme: Move this test to sgx-runtime: -// -// https://github.com/integritee-network/sgx-runtime/issues/23 -pub fn test_sgx_state_decode_encode_works() { - // given - let state = given_hello_world_state(); - - // when - let encoded_state = state.state.encode(); - let state2 = StfStateType::decode(&mut encoded_state.as_slice()).unwrap(); - - // then - assert_eq!(state.state, state2); -} - -pub fn test_encrypt_decrypt_state_type_works() { - // given - let state = given_hello_world_state(); - - // when - let encrypted = encrypt(state.state.encode()).unwrap(); - - let decrypted = encrypt(encrypted).unwrap(); - let decoded = StfStateType::decode(&mut decrypted.as_slice()).unwrap(); - - // then - assert_eq!(state.state, decoded); -} - -pub fn test_write_and_load_state_works() { - // given - ensure_no_empty_shard_directory_exists(); - - let state = given_hello_world_state(); - - let shard: ShardIdentifier = [94u8; 32].into(); - given_initialized_shard(&shard); - - // when - let _hash = state_write(state.clone(), &shard).unwrap(); - let result = load(&shard).unwrap(); - - // then - assert_eq!(state.state, result.state); - - // clean up - remove_shard_dir(&shard); -} - -pub fn test_ensure_subsequent_state_loads_have_same_hash() { - // given - ensure_no_empty_shard_directory_exists(); - - let shard: ShardIdentifier = [49u8; 32].into(); - given_initialized_shard(&shard); - - let state_handler = GlobalFileStateHandler; - - let (lock, initial_state) = state_handler.load_for_mutation(&shard).unwrap(); - state_handler.write(initial_state.clone(), lock, &shard).unwrap(); - - let state_loaded = state_handler.load_initialized(&shard).unwrap(); - - assert_eq!(hash_of(&initial_state.state), 
hash_of(&state_loaded.state)); - - // clean up - remove_shard_dir(&shard); -} - -fn hash_of(encodable: &T) -> H256 { - encodable.using_encoded(blake2_256).into() -} - -pub fn test_write_access_locks_read_until_finished() { - // here we want to test that a lock we obtain for - // mutating state locks out any read attempt that happens during that time - - // given - ensure_no_empty_shard_directory_exists(); - - let shard: ShardIdentifier = [47u8; 32].into(); - given_initialized_shard(&shard); - - let state_handler = GlobalFileStateHandler; - - let new_state_key = "my_new_state".encode(); - let (lock, mut state_to_mutate) = state_handler.load_for_mutation(&shard).unwrap(); - - // spawn a new thread that reads state - // this thread should be blocked until the write lock is released, i.e. until - // the new state is written. We can verify this, by trying to read that state variable - // that will be inserted further down below - let new_state_key_for_read = new_state_key.clone(); - let shard_for_read = shard.clone(); - let join_handle = thread::spawn(move || { - let state_handler = GlobalFileStateHandler; - let state_to_read = state_handler.load_initialized(&shard_for_read).unwrap(); - assert!(state_to_read.get(new_state_key_for_read.as_slice()).is_some()); - }); - - assert!(state_to_mutate.get(new_state_key.clone().as_slice()).is_none()); - state_to_mutate.insert(new_state_key, "mega_secret_value".encode()); - - let _hash = state_handler.write(state_to_mutate, lock, &shard).unwrap(); - - join_handle.join().unwrap(); - - // clean up - remove_shard_dir(&shard); -} - -fn ensure_no_empty_shard_directory_exists() { - // ensure no empty states are within directory (created with init-shard) - // otherwise an 'index out of bounds: the len is x but the index is x' - // error will be thrown - let shards = list_shards().unwrap(); - for shard in shards { - if !exists(&shard) { - init_shard(&shard).unwrap(); - } - } -} - -fn given_hello_world_state() -> StfState { - let key: Vec = "hello".encode(); - let value: Vec = "world".encode(); - let mut state = StfState::new(); - state.insert(key, value); - state -} - -fn given_initialized_shard(shard: &ShardIdentifier) { - if exists(&shard) { - remove_shard_dir(shard); - } - init_shard(&shard).unwrap(); -} - -fn remove_shard_dir(shard: &ShardIdentifier) { - std::fs::remove_dir_all(&format!("{}/{}", SHARDS_PATH, shard.encode().to_base58())).unwrap(); -} diff --git a/core-primitives/storage-verified/Cargo.toml b/core-primitives/storage-verified/Cargo.toml deleted file mode 100644 index a7b762bd14..0000000000 --- a/core-primitives/storage-verified/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -name = "itp-storage-verifier" -version = "0.8.0" -edition = "2018" - -[dependencies] -thiserror = { version = "1.0", optional = true } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -derive_more = { version = "0.99.5" } - -# sgx enabled external libraries -thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linux/thiserror-sgx", tag = "sgx_1.1.3", optional = true } - -# sgx deps -sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } -sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } - -# substrate deps -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, 
git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} - -# local deps -itp-storage = { path = "../storage", default-features = false } -itp-ocall-api = { path = "../ocall-api", default-features = false } -itp-types = { path = "../types", default-features = false } - -[features] -default = ["std"] -std = [ - "codec/std", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", - "itp-storage/std", - "itp-ocall-api/std", - "itp-types/std", - "thiserror", -] -sgx = [ - "sgx_tstd", - "itp-storage/sgx", - "itp-types/sgx", - "thiserror_sgx", -] diff --git a/core-primitives/storage-verified/src/lib.rs b/core-primitives/storage-verified/src/lib.rs deleted file mode 100644 index 08c472bfc5..0000000000 --- a/core-primitives/storage-verified/src/lib.rs +++ /dev/null @@ -1,120 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -*/ - -//! Basic storage access abstraction - -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(all(feature = "std", feature = "sgx"))] -compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the same time"); - -#[cfg(all(not(feature = "std"), feature = "sgx"))] -extern crate sgx_tstd as std; -// re-export module to properly feature gate sgx and regular std environment -#[cfg(all(not(feature = "std"), feature = "sgx"))] -pub mod sgx_reexport_prelude { - pub use thiserror_sgx as thiserror; -} - -#[cfg(all(not(feature = "std"), feature = "sgx"))] -use crate::sgx_reexport_prelude::*; - -use codec::Decode; -use core::result::Result as StdResult; -use itp_ocall_api::EnclaveOnChainOCallApi; -use itp_storage::{verify_storage_entries, Error as StorageError, StorageEntryVerified}; -use itp_types::WorkerRequest; -use sp_core::H256; -use sp_runtime::traits::Header; -use sp_std::{prelude::*, vec}; -use std::format; - -/// Very basic abstraction over storage access that returns a `StorageEntryVerified`. This enforces -/// that the implementation of this trait uses the `itp_storage::VerifyStorageProof` trait -/// because a `StorageEntryVerified` instance cannot be created otherwise. -/// -/// This is very generic and most-likely one of the innermost traits. -pub trait GetStorageVerified { - fn get_storage_verified, V: Decode>( - &self, - storage_hash: Vec, - header: &H, - ) -> Result>; - - fn get_multiple_storages_verified, V: Decode>( - &self, - storage_hashes: Vec>, - header: &H, - ) -> Result>>; -} - -impl GetStorageVerified for O { - fn get_storage_verified, V: Decode>( - &self, - storage_hash: Vec, - header: &H, - ) -> Result> { - // the code below seems like an overkill, but it is surprisingly difficult to - // get an owned value from a `Vec` without cloning. - Ok(self - .get_multiple_storages_verified(vec![storage_hash], header)? - .into_iter() - .next() - .ok_or(StorageError::StorageValueUnavailable)?) 
- } - - fn get_multiple_storages_verified, V: Decode>( - &self, - storage_hashes: Vec>, - header: &H, - ) -> Result>> { - let requests = storage_hashes - .into_iter() - .map(|key| WorkerRequest::ChainStorage(key, Some(header.hash()))) - .collect(); - - let storage_entries = self - .worker_request::>(requests) - .map(|storages| verify_storage_entries(storages, header))??; - - Ok(storage_entries) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("Storage error: {0}")] - Storage(#[from] StorageError), - #[error("SGX error, status: {0}")] - Sgx(sgx_types::sgx_status_t), - #[error("Error, other: {0}")] - Other(#[from] Box), -} - -impl From for Error { - fn from(sgx_status: sgx_types::sgx_status_t) -> Self { - Self::Sgx(sgx_status) - } -} - -impl From for Error { - fn from(e: codec::Error) -> Self { - Self::Other(format!("{:?}", e).into()) - } -} - -pub type Result = StdResult; diff --git a/core-primitives/storage/Cargo.toml b/core-primitives/storage/Cargo.toml index b7bf8a4b49..1c5a300dff 100644 --- a/core-primitives/storage/Cargo.toml +++ b/core-primitives/storage/Cargo.toml @@ -6,8 +6,9 @@ edition = "2018" resolver = "2" [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["chain-error"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["chain-error"] } derive_more = { version = "0.99.5" } +frame-metadata = { version = "15.0.0", features = ["v14"], default-features = false} hash-db = { version = "0.15.2", default-features = false } thiserror = { version = "1.0.26", optional = true } @@ -16,15 +17,14 @@ thiserror-sgx = { package = "thiserror", git = "https://github.com/mesalock-linu sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } # substrate deps -frame-metadata = { version = "14.0.0", features = ["v13"], default-features = false, git = "https://github.com/paritytech/frame-metadata.git", branch = "main" } -frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-trie = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-trie = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [dev-dependencies] -sp-state-machine = { version = "0.11.0", git = 
"https://github.com/paritytech/substrate.git", branch = "master"} +sp-state-machine = { version = "0.12.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [features] default = ["std"] diff --git a/core-primitives/storage/src/keys.rs b/core-primitives/storage/src/keys.rs index 19aeff183d..43de4f667e 100644 --- a/core-primitives/storage/src/keys.rs +++ b/core-primitives/storage/src/keys.rs @@ -16,7 +16,7 @@ */ use codec::Encode; -use frame_metadata::v13::StorageHasher; +use frame_metadata::v14::StorageHasher; use sp_std::vec::Vec; pub fn storage_value_key(module_prefix: &str, storage_prefix: &str) -> Vec { diff --git a/core-primitives/storage/src/lib.rs b/core-primitives/storage/src/lib.rs index 844af54c29..71cd075da9 100644 --- a/core-primitives/storage/src/lib.rs +++ b/core-primitives/storage/src/lib.rs @@ -24,7 +24,7 @@ compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the sam extern crate sgx_tstd as std; pub use error::Error; -pub use frame_metadata::v13::StorageHasher; +pub use frame_metadata::v14::StorageHasher; pub use keys::*; pub use proof::*; pub use storage_entry::*; diff --git a/core-primitives/teerex-storage/Cargo.toml b/core-primitives/teerex-storage/Cargo.toml index 14bf8bb205..f06a6db286 100644 --- a/core-primitives/teerex-storage/Cargo.toml +++ b/core-primitives/teerex-storage/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" resolver = "2" [dependencies] -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} #local deps itp-storage = { path = "../storage", default-features = false } diff --git a/core-primitives/test/Cargo.toml b/core-primitives/test/Cargo.toml index 4c0e184939..7ff9e76b48 100644 --- a/core-primitives/test/Cargo.toml +++ b/core-primitives/test/Cargo.toml @@ -4,7 +4,7 @@ version = "0.8.0" edition = "2018" [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } derive_more = { version = "0.99.5" } # sgx deps @@ -18,9 +18,9 @@ jsonrpc-core_sgx = { package = "jsonrpc-core", git = "https://github.com/scs/jso jsonrpc-core = { version = "18", optional = true } # substrate deps -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # local deps ita-stf = { path = "../../app-libs/stf", default-features = false } @@ -30,9 +30,7 @@ itp-teerex-storage = { path = "../teerex-storage", default-features = false } itp-time-utils = { path = "../time-utils", default-features = 
false } itp-types = { path = "../types", default-features = false } itp-ocall-api = { path = "../ocall-api", default-features = false } -itp-storage-verifier = { path = "../storage-verified", default-features = false } itp-storage = { path = "../storage", default-features = false, features = ["test"] } -its-top-pool = { path = "../../sidechain/top-pool", default-features = false } [features] default = ["std"] @@ -49,8 +47,6 @@ std = [ "itp-time-utils/std", "itp-types/std", "itp-ocall-api/std", - "itp-storage-verifier/std", - "its-top-pool/std", "jsonrpc-core", ] sgx = [ @@ -60,8 +56,6 @@ sgx = [ "itp-time-utils/sgx", "itp-sgx-crypto/sgx", "itp-stf-state-handler/sgx", - "itp-storage-verifier/sgx", - "its-top-pool/sgx", "jsonrpc-core_sgx", "sgx-externalities/sgx", ] diff --git a/core-primitives/test/src/mock/handle_state_mock.rs b/core-primitives/test/src/mock/handle_state_mock.rs index b514bbb8f4..c33ea7f4d7 100644 --- a/core-primitives/test/src/mock/handle_state_mock.rs +++ b/core-primitives/test/src/mock/handle_state_mock.rs @@ -22,13 +22,13 @@ use std::sync::{SgxRwLock as RwLock, SgxRwLockWriteGuard as RwLockWriteGuard}; use std::sync::{RwLock, RwLockWriteGuard}; use codec::Encode; -use ita_stf::{ShardIdentifier, State as StfState}; +use ita_stf::State as StfState; use itp_stf_state_handler::{ error::{Error, Result}, handle_state::HandleState, query_shard_state::QueryShardState, }; -use itp_types::H256; +use itp_types::{ShardIdentifier, H256}; use sp_core::blake2_256; use std::{collections::HashMap, format, vec::Vec}; @@ -40,52 +40,72 @@ pub struct HandleStateMock { state_map: RwLock>, } +impl HandleStateMock { + pub fn from_shard(shard: ShardIdentifier) -> Result { + let state_handler = HandleStateMock { state_map: Default::default() }; + state_handler.initialize_shard(shard)?; + Ok(state_handler) + } +} + impl HandleState for HandleStateMock { type WriteLockPayload = HashMap; type StateT = StfState; + type HashType = H256; - fn load_initialized(&self, shard: &ShardIdentifier) -> Result { - let maybe_state = self.state_map.read().unwrap().get(shard).cloned(); + fn initialize_shard(&self, shard: ShardIdentifier) -> Result { + let maybe_state = self.state_map.read().unwrap().get(&shard).cloned(); return match maybe_state { // Initialize with default state, if it doesn't exist yet. 
None => { - self.state_map.write().unwrap().insert(*shard, StfState::default()); - - self.state_map.read().unwrap().get(shard).cloned().ok_or_else(|| { - Error::Other( - format!("state does not exist after inserting it, shard {:?}", shard) .into(), - ) - }) + let state = StfState::default(); + let state_hash = state.using_encoded(blake2_256).into(); + self.state_map.write().unwrap().insert(shard, state); + Ok(state_hash) }, - Some(s) => Ok(s), + Some(s) => Ok(s.using_encoded(blake2_256).into()), } } + fn load(&self, shard: &ShardIdentifier) -> Result<StfState> { + self.state_map + .read() + .unwrap() + .get(shard) + .cloned() + .ok_or_else(|| Error::Other(format!("shard is not initialized {:?}", shard).into())) + } + fn load_for_mutation( &self, shard: &ShardIdentifier, ) -> Result<(RwLockWriteGuard<'_, Self::WriteLockPayload>, StfState)> { - let initialized_state = self.load_initialized(shard)?; + let initialized_state = self.load(shard)?; let write_lock = self.state_map.write().unwrap(); Ok((write_lock, initialized_state)) } - fn write( + fn write_after_mutation( &self, state: StfState, mut state_lock: RwLockWriteGuard<'_, Self::WriteLockPayload>, shard: &ShardIdentifier, - ) -> Result<H256> { + ) -> Result<Self::HashType> { state_lock.insert(*shard, state.clone()); Ok(state.using_encoded(blake2_256).into()) } + + fn reset(&self, state: Self::StateT, shard: &ShardIdentifier) -> Result<Self::HashType> { + let write_lock = self.state_map.write().unwrap(); + self.write_after_mutation(state, write_lock, shard) + } } impl QueryShardState for HandleStateMock { - fn exists(&self, shard: &ShardIdentifier) -> bool { - self.state_map.read().unwrap().get(shard).is_some() + fn shard_exists(&self, shard: &ShardIdentifier) -> Result<bool> { + let state_map_lock = self.state_map.read().map_err(|_| Error::LockPoisoning)?; + Ok(state_map_lock.get(shard).is_some()) } fn list_shards(&self) -> Result<Vec<ShardIdentifier>> { @@ -112,15 +132,18 @@ pub mod tests { pub fn shard_exists_after_inserting() { let state_handler = HandleStateMock::default(); let shard = ShardIdentifier::default(); - let _loaded_state_result = state_handler.load_initialized(&shard); - assert!(state_handler.exists(&shard)); + state_handler.initialize_shard(shard).unwrap(); + + assert!(state_handler.load(&shard).is_ok()); + assert!(state_handler.shard_exists(&shard).unwrap()); } - pub fn load_initialized_inserts_default_state() { + pub fn initialize_creates_default_state() { let state_handler = HandleStateMock::default(); let shard = ShardIdentifier::default(); + state_handler.initialize_shard(shard).unwrap(); - let loaded_state_result = state_handler.load_initialized(&shard); + let loaded_state_result = state_handler.load(&shard); assert!(loaded_state_result.is_ok()); } @@ -128,33 +151,33 @@ pub mod tests { pub fn load_mutate_and_write_works() { let state_handler = HandleStateMock::default(); let shard = ShardIdentifier::default(); + state_handler.initialize_shard(shard).unwrap(); let (lock, mut state) = state_handler.load_for_mutation(&shard).unwrap(); let (key, value) = ("my_key", "my_value"); state.insert(key.encode(), value.encode()); - state_handler.write(state, lock, &shard).unwrap(); + state_handler.write_after_mutation(state, lock, &shard).unwrap(); - let updated_state = state_handler.load_initialized(&shard).unwrap(); + let updated_state = state_handler.load(&shard).unwrap(); let inserted_value = updated_state.get(key.encode().as_slice()).expect("value for key should exist"); assert_eq!(*inserted_value, value.encode()); } - // This is the same test as for the `GlobalFileStateHandler` to ensure we 
don't have any effects - // from having the state in-memory (as here) vs. in file (`GlobalFileStateHandler`). pub fn ensure_subsequent_state_loads_have_same_hash() { let state_handler = HandleStateMock::default(); let shard = ShardIdentifier::default(); + state_handler.initialize_shard(shard).unwrap(); let (lock, _) = state_handler.load_for_mutation(&shard).unwrap(); let initial_state = Stf::init_state(); let state_hash_before_execution = hash_of(&initial_state.state); - state_handler.write(initial_state, lock, &shard).unwrap(); + state_handler.write_after_mutation(initial_state, lock, &shard).unwrap(); - let state_loaded = state_handler.load_initialized(&shard).unwrap(); + let state_loaded = state_handler.load(&shard).unwrap(); let loaded_state_hash = hash_of(&state_loaded.state); assert_eq!(state_hash_before_execution, loaded_state_hash); diff --git a/core-primitives/test/src/mock/mod.rs b/core-primitives/test/src/mock/mod.rs index af1edf0fd0..7af21bbf29 100644 --- a/core-primitives/test/src/mock/mod.rs +++ b/core-primitives/test/src/mock/mod.rs @@ -17,10 +17,8 @@ pub mod handle_state_mock; pub mod metrics_ocall_mock; -pub mod ocall_api_mock; pub mod onchain_mock; pub mod sidechain_ocall_api_mock; -pub mod trusted_operation_pool_mock; #[cfg(feature = "sgx")] pub mod shielding_crypto_mock; diff --git a/core-primitives/test/src/mock/ocall_api_mock.rs b/core-primitives/test/src/mock/ocall_api_mock.rs deleted file mode 100644 index 0d943fe415..0000000000 --- a/core-primitives/test/src/mock/ocall_api_mock.rs +++ /dev/null @@ -1,164 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - Copyright (C) 2017-2019 Baidu, Inc. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -*/ - -use codec::{Decode, Encode}; -use core::fmt::Debug; -use itp_ocall_api::{ - EnclaveAttestationOCallApi, EnclaveMetricsOCallApi, EnclaveOnChainOCallApi, - EnclaveSidechainOCallApi, -}; -use itp_teerex_storage::{TeeRexStorage, TeerexStorageKeys}; -use itp_types::{BlockHash, Enclave, ShardIdentifier, WorkerRequest, WorkerResponse}; -use sgx_types::{ - sgx_epid_group_id_t, sgx_measurement_t, sgx_platform_info_t, sgx_quote_nonce_t, - sgx_quote_sign_type_t, sgx_report_t, sgx_spid_t, sgx_target_info_t, sgx_update_info_bit_t, - SgxResult, SGX_HASH_SIZE, -}; -use sp_runtime::{AccountId32, OpaqueExtrinsic}; -use sp_std::prelude::*; -use std::collections::HashMap; - -/// This struct is the same as OnchainMock but -/// implements EnclaveOnChainOCallApi instead of GetStorageVerified. 
-#[derive(Default, Clone, Debug)] -pub struct OcallApiMock { - inner: HashMap, Vec>, - mr_enclave: [u8; SGX_HASH_SIZE], -} - -impl OcallApiMock { - pub fn with_storage_entries(mut self, entries: Vec<(Vec, V)>) -> Self { - for (k, v) in entries.into_iter() { - self.inner.insert(k, v.encode()); - } - self - } - - pub fn with_validateer_set(mut self, set: Option>) -> Self { - let set = set.unwrap_or_else(validateer_set); - self.inner.insert(TeeRexStorage::enclave_count(), (set.len() as u64).encode()); - self.with_storage_entries(into_key_value_storage(set)) - } - - pub fn with_mr_enclave(mut self, mr_enclave: [u8; SGX_HASH_SIZE]) -> Self { - self.mr_enclave = mr_enclave; - self - } - - pub fn insert(&mut self, key: Vec, value: Vec) { - self.inner.insert(key, value); - } - - pub fn get(&self, key: &[u8]) -> Option<&Vec> { - self.inner.get(key) - } -} - -impl EnclaveAttestationOCallApi for OcallApiMock { - fn sgx_init_quote(&self) -> SgxResult<(sgx_target_info_t, sgx_epid_group_id_t)> { - todo!() - } - - fn get_ias_socket(&self) -> SgxResult { - Ok(42) - } - - fn get_quote( - &self, - _sig_rl: Vec, - _report: sgx_report_t, - _sign_type: sgx_quote_sign_type_t, - _spid: sgx_spid_t, - _quote_nonce: sgx_quote_nonce_t, - ) -> SgxResult<(sgx_report_t, Vec)> { - todo!() - } - - fn get_update_info( - &self, - _platform_info: sgx_platform_info_t, - _enclave_trusted: i32, - ) -> SgxResult { - todo!() - } - - fn get_mrenclave_of_self(&self) -> SgxResult { - Ok(sgx_measurement_t { m: self.mr_enclave }) - } -} - -impl EnclaveSidechainOCallApi for OcallApiMock { - fn propose_sidechain_blocks( - &self, - _signed_blocks: Vec, - ) -> SgxResult<()> { - Ok(()) - } - - fn store_sidechain_blocks( - &self, - _signed_blocks: Vec, - ) -> SgxResult<()> { - Ok(()) - } - - fn fetch_sidechain_blocks_from_peer( - &self, - _last_known_block_hash: BlockHash, - _shard_identifier: ShardIdentifier, - ) -> SgxResult> { - Ok(Vec::new()) - } -} - -impl EnclaveMetricsOCallApi for OcallApiMock { - fn update_metric(&self, _metric: Metric) -> SgxResult<()> { - Ok(()) - } -} - -impl EnclaveOnChainOCallApi for OcallApiMock { - fn send_to_parentchain(&self, _extrinsics: Vec) -> SgxResult<()> { - Ok(()) - } - - fn worker_request( - &self, - _req: Vec, - ) -> SgxResult>> { - Ok(Vec::new()) - } -} - -pub fn validateer_set() -> Vec { - let default_enclave = Enclave::new( - AccountId32::from([0; 32]), - Default::default(), - Default::default(), - Default::default(), - ); - vec![default_enclave.clone(), default_enclave.clone(), default_enclave.clone(), default_enclave] -} - -fn into_key_value_storage(validateers: Vec) -> Vec<(Vec, Enclave)> { - validateers - .into_iter() - .enumerate() - .map(|(i, e)| (TeeRexStorage::enclave(i as u64 + 1), e)) - .collect() -} diff --git a/core-primitives/test/src/mock/onchain_mock.rs b/core-primitives/test/src/mock/onchain_mock.rs index f241fbc632..a5dcbb64b7 100644 --- a/core-primitives/test/src/mock/onchain_mock.rs +++ b/core-primitives/test/src/mock/onchain_mock.rs @@ -18,18 +18,20 @@ use codec::{Decode, Encode}; use core::fmt::Debug; -use itp_ocall_api::{EnclaveAttestationOCallApi, EnclaveMetricsOCallApi, EnclaveSidechainOCallApi}; +use itp_ocall_api::{ + EnclaveAttestationOCallApi, EnclaveMetricsOCallApi, EnclaveOnChainOCallApi, + EnclaveSidechainOCallApi, +}; use itp_storage::StorageEntryVerified; -use itp_storage_verifier::{GetStorageVerified, Result}; use itp_teerex_storage::{TeeRexStorage, TeerexStorageKeys}; -use itp_types::{BlockHash, Enclave, ShardIdentifier}; +use itp_types::{BlockHash, Enclave, 
ShardIdentifier, WorkerRequest, WorkerResponse}; use sgx_types::{ sgx_epid_group_id_t, sgx_measurement_t, sgx_platform_info_t, sgx_quote_nonce_t, sgx_quote_sign_type_t, sgx_report_t, sgx_spid_t, sgx_target_info_t, sgx_update_info_bit_t, SgxResult, SGX_HASH_SIZE, }; use sp_core::H256; -use sp_runtime::{traits::Header as HeaderT, AccountId32}; +use sp_runtime::{traits::Header as HeaderT, AccountId32, OpaqueExtrinsic}; use sp_std::prelude::*; use std::collections::HashMap; @@ -67,36 +69,6 @@ impl OnchainMock { } } -impl GetStorageVerified for OnchainMock { - fn get_storage_verified, V: Decode>( - &self, - storage_hash: Vec, - _header: &H, - ) -> Result> { - let value = self - .get(&storage_hash) - .map(|val| Decode::decode(&mut val.as_slice())) - .transpose()?; - - Ok(StorageEntryVerified::new(storage_hash, value)) - } - - fn get_multiple_storages_verified, V: Decode>( - &self, - storage_hashes: Vec>, - _header: &H, - ) -> Result>> { - let mut entries = Vec::with_capacity(storage_hashes.len()); - for hash in storage_hashes.into_iter() { - let value = - self.get(&hash).map(|val| Decode::decode(&mut val.as_slice())).transpose()?; - - entries.push(StorageEntryVerified::new(hash, value)) - } - Ok(entries) - } -} - impl EnclaveAttestationOCallApi for OnchainMock { fn sgx_init_quote(&self) -> SgxResult<(sgx_target_info_t, sgx_epid_group_id_t)> { todo!() @@ -160,22 +132,50 @@ impl EnclaveMetricsOCallApi for OnchainMock { } } -// We cannot implement EnclaveOnChainOCallApi specifically here, because OnchainMock already -// implements `GetStorageVerified`. And all implementers of `EnclaveOnChainOCallApi` automatically -// implement GetStorageVerified too (-> see `core-primitives/storage-verified/src/lib.rs`), -// so it results in duplicate implementations. -// impl EnclaveOnChainOCallApi for OnchainMock { -// fn send_to_parentchain(&self, _extrinsics: Vec) -> SgxResult<()> { -// Ok(()) -// } -// -// fn worker_request( -// &self, -// _req: Vec, -// ) -> SgxResult>> { -// Ok(Vec::new()) -// } -// } +impl EnclaveOnChainOCallApi for OnchainMock { + fn send_to_parentchain(&self, _extrinsics: Vec) -> SgxResult<()> { + Ok(()) + } + + fn worker_request( + &self, + _req: Vec, + ) -> SgxResult>> { + Ok(Vec::new()) + } + + fn get_storage_verified, V: Decode>( + &self, + storage_hash: Vec, + _header: &H, + ) -> Result, itp_ocall_api::Error> { + let value = self + .get(&storage_hash) + .map(|val| Decode::decode(&mut val.as_slice())) + .transpose() + .map_err(|e| itp_ocall_api::Error::Codec(e))?; + + Ok(StorageEntryVerified::new(storage_hash, value)) + } + + fn get_multiple_storages_verified, V: Decode>( + &self, + storage_hashes: Vec>, + _header: &H, + ) -> Result>, itp_ocall_api::Error> { + let mut entries = Vec::with_capacity(storage_hashes.len()); + for hash in storage_hashes.into_iter() { + let value = self + .get(&hash) + .map(|val| Decode::decode(&mut val.as_slice())) + .transpose() + .map_err(|e| itp_ocall_api::Error::Codec(e))?; + + entries.push(StorageEntryVerified::new(hash, value)) + } + Ok(entries) + } +} pub fn validateer_set() -> Vec { let default_enclave = Enclave::new( diff --git a/core-primitives/time-utils/src/lib.rs b/core-primitives/time-utils/src/lib.rs index 0244ed6041..6ae6f348af 100644 --- a/core-primitives/time-utils/src/lib.rs +++ b/core-primitives/time-utils/src/lib.rs @@ -31,6 +31,11 @@ pub fn now_as_u64() -> u64 { duration_now().as_millis() as u64 } +/// Returns the current timestamp based on the unix epoch in nanoseconds. 
+pub fn now_as_nanos() -> u128 { + duration_now().as_nanos() +} + /// Calculates the remaining time `until`. pub fn remaining_time(until: Duration) -> Option { until.checked_sub(duration_now()) diff --git a/sidechain/top-pool-rpc-author/Cargo.toml b/core-primitives/top-pool-author/Cargo.toml similarity index 65% rename from sidechain/top-pool-rpc-author/Cargo.toml rename to core-primitives/top-pool-author/Cargo.toml index 1919f36e22..47e10488a5 100644 --- a/sidechain/top-pool-rpc-author/Cargo.toml +++ b/core-primitives/top-pool-author/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "its-top-pool-rpc-author" +name = "itp-top-pool-author" version = "0.8.0" authors = ["Integritee AG "] edition = "2018" @@ -14,12 +14,11 @@ std = [ "itp-sgx-crypto/std", "itc-direct-rpc-server/std", "itc-tls-websocket-server/std", - "itp-component-container/std", "itp-enclave-metrics/std", "itp-ocall-api/std", "itp-stf-state-handler/std", + "itp-top-pool/std", "itp-types/std", - "its-top-pool/std", "jsonrpc-core", "log/std", "thiserror", @@ -31,15 +30,14 @@ sgx = [ "ita-stf/sgx", "itc-direct-rpc-server/sgx", "itc-tls-websocket-server/sgx", - "itp-component-container/sgx", "itp-enclave-metrics/sgx", "itp-sgx-crypto/sgx", "itp-stf-state-handler/sgx", + "itp-top-pool/sgx", "itp-types/sgx", - "its-top-pool/sgx", "thiserror_sgx", ] -test = [ "itp-test/sgx" ] +test = [ "itp-test/sgx", "itp-top-pool/mocks" ] [dependencies] # sgx dependencies @@ -51,14 +49,13 @@ sgx-crypto-helper = { branch = "master", git = "https://github.com/apache/teacla ita-stf = { path = "../../app-libs/stf", default-features = false } itc-direct-rpc-server = { path = "../../core/direct-rpc-server", default-features = false } itc-tls-websocket-server = { path = "../../core/tls-websocket-server", default-features = false } -itp-component-container = { path = "../../core-primitives/component-container", default-features = false } -itp-enclave-metrics = { path = "../../core-primitives/enclave-metrics", default-features = false } -itp-ocall-api = { path = "../../core-primitives/ocall-api", default-features = false } -itp-sgx-crypto = { path = "../../core-primitives/sgx/crypto", default-features = false } -itp-stf-state-handler = { path = "../../core-primitives/stf-state-handler", default-features = false } -itp-test = { path = "../../core-primitives/test", default-features = false, optional = true } -itp-types = { path = "../../core-primitives/types", default-features = false } -its-top-pool = { path = "../top-pool", default-features = false } +itp-enclave-metrics = { path = "../enclave-metrics", default-features = false } +itp-ocall-api = { path = "../ocall-api", default-features = false } +itp-sgx-crypto = { path = "../sgx/crypto", default-features = false } +itp-stf-state-handler = { path = "../stf-state-handler", default-features = false } +itp-test = { path = "../test", default-features = false, optional = true } +itp-top-pool = { path = "../top-pool", default-features = false } +itp-types = { path = "../types", default-features = false } # sgx enabled external libraries jsonrpc-core_sgx = { package = "jsonrpc-core", git = "https://github.com/scs/jsonrpc", branch = "no_std", default-features = false, optional = true } @@ -69,8 +66,8 @@ jsonrpc-core = { version = "18", optional = true } thiserror = { version = "1.0", optional = true } # no-std compatible libraries -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", 
default-features = false, features = ["derive"] } derive_more = { version = "0.99.5" } log = { version = "0.4", default-features = false } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} diff --git a/sidechain/top-pool-rpc-author/src/api.rs b/core-primitives/top-pool-author/src/api.rs similarity index 99% rename from sidechain/top-pool-rpc-author/src/api.rs rename to core-primitives/top-pool-author/src/api.rs index 980f0f208e..5e16183f91 100644 --- a/sidechain/top-pool-rpc-author/src/api.rs +++ b/core-primitives/top-pool-author/src/api.rs @@ -23,11 +23,11 @@ use crate::sgx_reexport_prelude::*; use crate::error; use codec::Encode; use ita_stf::{Getter, ShardIdentifier, TrustedOperation as StfTrustedOperation}; -use itp_types::BlockHash as SidechainBlockHash; -use its_top_pool::{ +use itp_top_pool::{ pool::{ChainApi, ExtrinsicHash, NumberFor}, primitives::TrustedOperationSource, }; +use itp_types::BlockHash as SidechainBlockHash; use jsonrpc_core::futures::future::{ready, Future, Ready}; use log::*; use sp_runtime::{ diff --git a/sidechain/top-pool-rpc-author/src/author.rs b/core-primitives/top-pool-author/src/author.rs similarity index 94% rename from sidechain/top-pool-rpc-author/src/author.rs rename to core-primitives/top-pool-author/src/author.rs index 406ec76610..cdb714d9a4 100644 --- a/sidechain/top-pool-rpc-author/src/author.rs +++ b/core-primitives/top-pool-author/src/author.rs @@ -22,7 +22,7 @@ use crate::{ client_error::Error as ClientError, error::{Error as StateRpcError, Result}, top_filter::{AllowAllTopsFilter, Filter}, - traits::{AuthorApi, OnBlockCreated, SendState}, + traits::{AuthorApi, OnBlockImported, SendState}, }; use codec::{Decode, Encode}; use ita_stf::{hash, Getter, TrustedCallSigned, TrustedGetterSigned, TrustedOperation}; @@ -30,14 +30,14 @@ use itp_enclave_metrics::EnclaveMetric; use itp_ocall_api::EnclaveMetricsOCallApi; use itp_sgx_crypto::ShieldingCrypto; use itp_stf_state_handler::query_shard_state::QueryShardState; -use itp_types::{BlockHash as SidechainBlockHash, ShardIdentifier}; -use its_top_pool::{ +use itp_top_pool::{ error::{Error as PoolError, IntoPoolError}, primitives::{ BlockHash, InPoolOperation, PoolFuture, TrustedOperationPool, TrustedOperationSource, TxHash, }, }; +use itp_types::{BlockHash as SidechainBlockHash, ShardIdentifier}; use jsonrpc_core::{ futures::future::{ready, TryFutureExt}, Error as RpcError, @@ -114,11 +114,14 @@ where shard: ShardIdentifier, submission_mode: TopSubmissionMode, ) -> PoolFuture, RpcError> { - // check if shard already exists - if !self.state_facade.exists(&shard) { - //FIXME: Should this be an error? 
-> Issue error handling - return Box::pin(ready(Err(ClientError::InvalidShard.into()))) - } + // check if shard exists + match self.state_facade.shard_exists(&shard) { + Err(_) => return Box::pin(ready(Err(ClientError::InvalidShard.into()))), + Ok(shard_exists) => + if !shard_exists { + return Box::pin(ready(Err(ClientError::InvalidShard.into()))) + }, + }; // decrypt call let request_vec = match self.encryption_key.decrypt(ext.as_slice()) { @@ -277,7 +280,7 @@ where } } -impl OnBlockCreated +impl OnBlockImported for Author where TopPool: TrustedOperationPool + Sync + Send + 'static, @@ -288,8 +291,8 @@ where { type Hash = ::Hash; - fn on_block_created(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash) { - self.top_pool.on_block_created(hashes, block_hash) + fn on_block_imported(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash) { + self.top_pool.on_block_imported(hashes, block_hash) } } diff --git a/sidechain/top-pool-rpc-author/src/author_tests.rs b/core-primitives/top-pool-author/src/author_tests.rs similarity index 96% rename from sidechain/top-pool-rpc-author/src/author_tests.rs rename to core-primitives/top-pool-author/src/author_tests.rs index a2bfef4d58..59b176a4e9 100644 --- a/sidechain/top-pool-rpc-author/src/author_tests.rs +++ b/core-primitives/top-pool-author/src/author_tests.rs @@ -30,8 +30,8 @@ use itp_stf_state_handler::handle_state::HandleState; use itp_test::mock::{ handle_state_mock::HandleStateMock, metrics_ocall_mock::MetricsOCallMock, shielding_crypto_mock::ShieldingCryptoMock, - trusted_operation_pool_mock::TrustedOperationPoolMock, }; +use itp_top_pool::mocks::trusted_operation_pool_mock::TrustedOperationPoolMock; use sgx_crypto_helper::{rsa3072::Rsa3072KeyPair, RsaKeyPair}; use sp_core::{ed25519, Pair, H256}; use sp_runtime::traits::{BlakeTwo256, Hash}; @@ -104,8 +104,8 @@ fn create_author_with_filter>( let top_pool = Arc::new(TrustedOperationPoolMock::default()); let shard_id = shard_id(); - let state_facade = HandleStateMock::default(); - let _ = state_facade.load_initialized(&shard_id).unwrap(); + let state_facade = HandleStateMock::from_shard(shard_id).unwrap(); + let _ = state_facade.load(&shard_id).unwrap(); let encryption_key = ShieldingCryptoMock::default(); let ocall_mock = Arc::new(MetricsOCallMock {}); diff --git a/sidechain/top-pool-rpc-author/src/client_error.rs b/core-primitives/top-pool-author/src/client_error.rs similarity index 98% rename from sidechain/top-pool-rpc-author/src/client_error.rs rename to core-primitives/top-pool-author/src/client_error.rs index f0cdb8a008..badd278008 100644 --- a/sidechain/top-pool-rpc-author/src/client_error.rs +++ b/core-primitives/top-pool-author/src/client_error.rs @@ -36,7 +36,7 @@ pub enum Error { Client(Box), /// TrustedOperation pool error, #[display(fmt = "TrustedOperation pool error: {}", _0)] - Pool(its_top_pool::error::Error), + Pool(itp_top_pool::error::Error), /// Verification error #[display(fmt = "Extrinsic verification error")] #[from(ignore)] @@ -107,7 +107,7 @@ const UNSUPPORTED_KEY_TYPE: i64 = POOL_INVALID_TX + 7; impl From for rpc_core::Error { fn from(e: Error) -> Self { - use its_top_pool::error::Error as PoolError; + use itp_top_pool::error::Error as PoolError; match e { Error::BadFormat => rpc_core::Error { diff --git a/sidechain/top-pool-rpc-author/src/error.rs b/core-primitives/top-pool-author/src/error.rs similarity index 97% rename from sidechain/top-pool-rpc-author/src/error.rs rename to core-primitives/top-pool-author/src/error.rs index 037bf69003..cde85e9d42 
100644 --- a/sidechain/top-pool-rpc-author/src/error.rs +++ b/core-primitives/top-pool-author/src/error.rs @@ -21,7 +21,7 @@ use crate::sgx_reexport_prelude::*; use crate::client_error::Error as ClientError; use core::pin::Pin; use derive_more::{Display, From}; -use its_top_pool::error::{Error as PoolError, IntoPoolError}; +use itp_top_pool::error::{Error as PoolError, IntoPoolError}; use jsonrpc_core as rpc; use std::{boxed::Box, error, format, string::String}; diff --git a/sidechain/top-pool-rpc-author/src/lib.rs b/core-primitives/top-pool-author/src/lib.rs similarity index 97% rename from sidechain/top-pool-rpc-author/src/lib.rs rename to core-primitives/top-pool-author/src/lib.rs index f2cd493965..2014839c6a 100644 --- a/sidechain/top-pool-rpc-author/src/lib.rs +++ b/core-primitives/top-pool-author/src/lib.rs @@ -35,8 +35,6 @@ pub mod api; pub mod author; pub mod client_error; pub mod error; -pub mod initializer; -pub mod pool_types; pub mod top_filter; pub mod traits; diff --git a/sidechain/top-pool-rpc-author/src/test_utils.rs b/core-primitives/top-pool-author/src/test_utils.rs similarity index 95% rename from sidechain/top-pool-rpc-author/src/test_utils.rs rename to core-primitives/top-pool-author/src/test_utils.rs index 3ad7b08c0d..d740cbd624 100644 --- a/sidechain/top-pool-rpc-author/src/test_utils.rs +++ b/core-primitives/top-pool-author/src/test_utils.rs @@ -45,11 +45,11 @@ where /// Get all pending trusted operations, grouped into calls and getters pub fn get_pending_tops_separated( - rpc_author: &R, + top_pool_author: &R, shard: ShardIdentifier, ) -> (Vec, Vec) where R: AuthorApi, { - rpc_author.get_pending_tops_separated(shard).unwrap() + top_pool_author.get_pending_tops_separated(shard).unwrap() } diff --git a/sidechain/top-pool-rpc-author/src/top_filter.rs b/core-primitives/top-pool-author/src/top_filter.rs similarity index 100% rename from sidechain/top-pool-rpc-author/src/top_filter.rs rename to core-primitives/top-pool-author/src/top_filter.rs diff --git a/sidechain/top-pool-rpc-author/src/traits.rs b/core-primitives/top-pool-author/src/traits.rs similarity index 93% rename from sidechain/top-pool-rpc-author/src/traits.rs rename to core-primitives/top-pool-author/src/traits.rs index 84099b707c..5c13cc9002 100644 --- a/sidechain/top-pool-rpc-author/src/traits.rs +++ b/core-primitives/top-pool-author/src/traits.rs @@ -20,15 +20,15 @@ use crate::sgx_reexport_prelude::*; use crate::error::Result; use ita_stf::{hash, TrustedCallSigned, TrustedGetterSigned, TrustedOperation}; +use itp_top_pool::primitives::PoolFuture; use itp_types::{BlockHash as SidechainBlockHash, ShardIdentifier, H256}; -use its_top_pool::primitives::PoolFuture; use jsonrpc_core::Error as RpcError; use std::vec::Vec; /// Trait alias for a full STF author API pub trait FullAuthor = AuthorApi + SendState - + OnBlockCreated + + OnBlockImported + Send + Sync + 'static; @@ -75,8 +75,8 @@ pub trait SendState { } /// Trait to notify listeners/observer of a newly created block -pub trait OnBlockCreated { +pub trait OnBlockImported { type Hash; - fn on_block_created(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash); + fn on_block_imported(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash); } diff --git a/sidechain/top-pool/Cargo.toml b/core-primitives/top-pool/Cargo.toml similarity index 84% rename from sidechain/top-pool/Cargo.toml rename to core-primitives/top-pool/Cargo.toml index 58356d5f30..10b7844701 100644 --- a/sidechain/top-pool/Cargo.toml +++ b/core-primitives/top-pool/Cargo.toml @@ 
-1,5 +1,5 @@ [package] -name = "its-top-pool" +name = "itp-top-pool" version = "0.8.0" authors = ["Integritee AG "] edition = "2018" @@ -33,6 +33,7 @@ std = [ "sp-application-crypto/std", "thiserror", ] +mocks = [] [dependencies] # sgx dependencies @@ -57,15 +58,15 @@ thiserror = { version = "1.0", optional = true } # no-std compatible libraries byteorder = { version = "1.4.2", default-features = false } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } derive_more = { version = "0.99.5" } log = { version = "0.4", default-features = false } retain_mut = { version = "0.1.2"} serde = { version = "1.0", default-features = false, features = ["alloc", "derive"] } -sp-application-crypto = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-application-crypto = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # dev dependencies (for tests) [dev-dependencies] -parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.11.0", default-features = false, features = ["primitive-types"] } diff --git a/sidechain/top-pool/src/base_pool.rs b/core-primitives/top-pool/src/base_pool.rs similarity index 100% rename from sidechain/top-pool/src/base_pool.rs rename to core-primitives/top-pool/src/base_pool.rs diff --git a/sidechain/top-pool/src/basic_pool.rs b/core-primitives/top-pool/src/basic_pool.rs similarity index 97% rename from sidechain/top-pool/src/basic_pool.rs rename to core-primitives/top-pool/src/basic_pool.rs index 51e27fcfe8..e1af0eaf92 100644 --- a/sidechain/top-pool/src/basic_pool.rs +++ b/core-primitives/top-pool/src/basic_pool.rs @@ -230,8 +230,8 @@ where self.pool.validated_pool().ready_by_hash(hash, shard) } - fn on_block_created(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash) { - self.pool.validated_pool().on_block_created(hashes, block_hash); + fn on_block_imported(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash) { + self.pool.validated_pool().on_block_imported(hashes, block_hash); } fn rpc_send_state(&self, hash: Self::Hash, state_encoded: Vec) -> Result<(), Error> { diff --git a/sidechain/top-pool/src/error.rs b/core-primitives/top-pool/src/error.rs similarity index 100% rename from sidechain/top-pool/src/error.rs rename to core-primitives/top-pool/src/error.rs diff --git a/sidechain/top-pool/src/future.rs b/core-primitives/top-pool/src/future.rs similarity index 100% rename from sidechain/top-pool/src/future.rs rename to core-primitives/top-pool/src/future.rs diff --git a/sidechain/top-pool/src/lib.rs b/core-primitives/top-pool/src/lib.rs similarity index 97% rename from 
sidechain/top-pool/src/lib.rs rename to core-primitives/top-pool/src/lib.rs index f3329e2c92..4daa4b4284 100644 --- a/sidechain/top-pool/src/lib.rs +++ b/core-primitives/top-pool/src/lib.rs @@ -46,3 +46,6 @@ pub mod watcher; #[cfg(test)] mod mocks; + +#[cfg(feature = "mocks")] +pub mod mocks; diff --git a/sidechain/top-pool/src/listener.rs b/core-primitives/top-pool/src/listener.rs similarity index 100% rename from sidechain/top-pool/src/listener.rs rename to core-primitives/top-pool/src/listener.rs diff --git a/core-primitives/top-pool/src/mocks/mod.rs b/core-primitives/top-pool/src/mocks/mod.rs new file mode 100644 index 0000000000..fd973f675c --- /dev/null +++ b/core-primitives/top-pool/src/mocks/mod.rs @@ -0,0 +1,22 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(test)] +pub mod rpc_responder_mock; + +#[cfg(feature = "mocks")] +pub mod trusted_operation_pool_mock; diff --git a/sidechain/top-pool/src/mocks/rpc_responder_mock.rs b/core-primitives/top-pool/src/mocks/rpc_responder_mock.rs similarity index 100% rename from sidechain/top-pool/src/mocks/rpc_responder_mock.rs rename to core-primitives/top-pool/src/mocks/rpc_responder_mock.rs diff --git a/core-primitives/test/src/mock/trusted_operation_pool_mock.rs b/core-primitives/top-pool/src/mocks/trusted_operation_pool_mock.rs similarity index 96% rename from core-primitives/test/src/mock/trusted_operation_pool_mock.rs rename to core-primitives/top-pool/src/mocks/trusted_operation_pool_mock.rs index 446351718a..0d98004cf5 100644 --- a/core-primitives/test/src/mock/trusted_operation_pool_mock.rs +++ b/core-primitives/top-pool/src/mocks/trusted_operation_pool_mock.rs @@ -25,11 +25,7 @@ use std::sync::SgxRwLock as RwLock; #[cfg(feature = "std")] use std::sync::RwLock; -use codec::Encode; -use core::{future::Future, pin::Pin}; -use ita_stf::{ShardIdentifier, TrustedOperation as StfTrustedOperation}; -use itp_types::{Block, BlockHash as SidechainBlockHash, H256}; -use its_top_pool::{ +use crate::{ base_pool::TrustedOperation, error::Error, primitives::{ @@ -37,6 +33,10 @@ use its_top_pool::{ TrustedOperationSource, TxHash, }, }; +use codec::Encode; +use core::{future::Future, pin::Pin}; +use ita_stf::TrustedOperation as StfTrustedOperation; +use itp_types::{Block, BlockHash as SidechainBlockHash, ShardIdentifier, H256}; use jsonrpc_core::futures::future::ready; use sp_runtime::{ generic::BlockId, @@ -211,7 +211,7 @@ impl TrustedOperationPool for TrustedOperationPoolMock { unimplemented!() } - fn on_block_created(&self, _hashes: &[Self::Hash], _block_hash: SidechainBlockHash) {} + fn on_block_imported(&self, _hashes: &[Self::Hash], _block_hash: SidechainBlockHash) {} fn rpc_send_state(&self, _hash: Self::Hash, _state_encoded: Vec) -> Result<(), Error> { Ok(()) diff --git a/sidechain/top-pool/src/pool.rs b/core-primitives/top-pool/src/pool.rs similarity index 100% rename from sidechain/top-pool/src/pool.rs rename to core-primitives/top-pool/src/pool.rs diff --git 
a/sidechain/top-pool/src/primitives.rs b/core-primitives/top-pool/src/primitives.rs similarity index 99% rename from sidechain/top-pool/src/primitives.rs rename to core-primitives/top-pool/src/primitives.rs index ff3e75cc95..f14129d13d 100644 --- a/sidechain/top-pool/src/primitives.rs +++ b/core-primitives/top-pool/src/primitives.rs @@ -258,7 +258,7 @@ pub trait TrustedOperationPool: Send + Sync { ) -> Option>; /// Notify the listener of top inclusion in sidechain block - fn on_block_created(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash); + fn on_block_imported(&self, hashes: &[Self::Hash], block_hash: SidechainBlockHash); /// Notify the RPC client of a state update fn rpc_send_state(&self, hash: Self::Hash, state_encoded: Vec) -> Result<(), error::Error>; diff --git a/sidechain/top-pool/src/ready.rs b/core-primitives/top-pool/src/ready.rs similarity index 100% rename from sidechain/top-pool/src/ready.rs rename to core-primitives/top-pool/src/ready.rs diff --git a/sidechain/top-pool/src/rotator.rs b/core-primitives/top-pool/src/rotator.rs similarity index 100% rename from sidechain/top-pool/src/rotator.rs rename to core-primitives/top-pool/src/rotator.rs diff --git a/sidechain/top-pool/src/tracked_map.rs b/core-primitives/top-pool/src/tracked_map.rs similarity index 100% rename from sidechain/top-pool/src/tracked_map.rs rename to core-primitives/top-pool/src/tracked_map.rs diff --git a/sidechain/top-pool/src/validated_pool.rs b/core-primitives/top-pool/src/validated_pool.rs similarity index 99% rename from sidechain/top-pool/src/validated_pool.rs rename to core-primitives/top-pool/src/validated_pool.rs index 289de3cefb..3b51d83a7b 100644 --- a/sidechain/top-pool/src/validated_pool.rs +++ b/core-primitives/top-pool/src/validated_pool.rs @@ -688,7 +688,7 @@ where } /// Notify the listener of top inclusion in sidechain block - pub fn on_block_created(&self, hashes: &[ExtrinsicHash], block_hash: SidechainBlockHash) { + pub fn on_block_imported(&self, hashes: &[ExtrinsicHash], block_hash: SidechainBlockHash) { for top_hash in hashes.iter() { self.listener.write().unwrap().in_block(top_hash, block_hash); } diff --git a/sidechain/top-pool/src/watcher.rs b/core-primitives/top-pool/src/watcher.rs similarity index 100% rename from sidechain/top-pool/src/watcher.rs rename to core-primitives/top-pool/src/watcher.rs diff --git a/core-primitives/types/Cargo.toml b/core-primitives/types/Cargo.toml index fb215d1cff..b1e53e4ede 100644 --- a/core-primitives/types/Cargo.toml +++ b/core-primitives/types/Cargo.toml @@ -5,12 +5,12 @@ authors = ["Integritee AG "] edition = "2018" [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -primitive-types = { version = "0.10.1", default-features = false, features = ["codec"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } +primitive-types = { version = "0.11.1", default-features = false, features = ["codec"] } chrono = { version = "0.4.19", default-features = false, features = ["alloc"]} serde = { version = "1.0", default-features = false, features = ["alloc", "derive"] } serde_json = { version = "1.0", default-features = false, features = ["alloc"] } -substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "master", default-features = false } +substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19", default-features = false } sgx_tstd = 
{ branch = "master", features = ["untrusted_fs","net","backtrace"], git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true} @@ -18,12 +18,12 @@ sgx_tstd = { branch = "master", features = ["untrusted_fs","net","backtrace"], g itp-storage = { path = "../storage", default-features = false } # substrate-deps -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } # node deps -pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } +pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } [features] default = ['std'] @@ -41,4 +41,4 @@ std = [ 'codec/std', sgx = [ 'sgx_tstd' ] [dev-dependencies] -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-keyring = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} diff --git a/core/direct-rpc-server/Cargo.toml b/core/direct-rpc-server/Cargo.toml index 0b6d8b2fd2..75be632b3c 100644 --- a/core/direct-rpc-server/Cargo.toml +++ b/core/direct-rpc-server/Cargo.toml @@ -29,10 +29,10 @@ sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true, features = ["net", "thread"] } # no-std dependencies -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } serde_json = { version = "1.0", default-features = false, features = ["alloc"] } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } # internal dependencies itp-types = { path = "../../core-primitives/types", default-features = false } diff --git a/core/direct-rpc-server/src/lib.rs b/core/direct-rpc-server/src/lib.rs index 345b4e7339..c7087e09be 100644 --- a/core/direct-rpc-server/src/lib.rs +++ b/core/direct-rpc-server/src/lib.rs @@ -35,11 +35,11 @@ use crate::sgx_reexport_prelude::*; use crate::rpc_watch_extractor::RpcWatchExtractor; use codec::{Encode, Error as CodecError}; -use itc_tls_websocket_server::{WebSocketConnection, WebSocketError}; +use itc_tls_websocket_server::error::WebSocketError; use itp_types::{RpcResponse, 
TrustedOperationStatus}; use serde_json::error::Error as SerdeJsonError; use sp_runtime::traits; -use std::{fmt::Debug, vec::Vec}; +use std::{boxed::Box, fmt::Debug, vec::Vec}; #[cfg(test)] mod mocks; @@ -47,9 +47,10 @@ mod mocks; #[cfg(test)] mod builders; +pub mod response_channel; pub mod rpc_connection_registry; pub mod rpc_responder; -mod rpc_watch_extractor; +pub mod rpc_watch_extractor; pub mod rpc_ws_handler; /// General web-socket error type @@ -60,9 +61,11 @@ pub enum DirectRpcError { #[error("RPC serialization error: {0}")] SerializationError(SerdeJsonError), #[error("Web socket error: {0}")] - WebSocketError(WebSocketError), + WebSocketError(#[from] WebSocketError), #[error("Encoding error: {0}")] EncodingError(CodecError), + #[error("Other error: {0}")] + Other(Box), } pub type DirectRpcResult = Result; @@ -71,17 +74,17 @@ pub type DirectRpcResult = Result; pub trait RpcHash: std::hash::Hash + traits::Member + Encode {} impl RpcHash for T {} -/// registry for RPC connections (i.e. connections that are kept alive to send updates) +/// Registry for RPC connections (i.e. connections that are kept alive to send updates). pub trait RpcConnectionRegistry: Send + Sync { type Hash: RpcHash; - type Connection: WebSocketConnection; + type Connection: Copy; fn store(&self, hash: Self::Hash, connection: Self::Connection, rpc_response: RpcResponse); fn withdraw(&self, hash: &Self::Hash) -> Option<(Self::Connection, RpcResponse)>; } -/// sends an RPC response back to the client +/// Sends an RPC response back to the client. pub trait SendRpcResponse: Send + Sync { type Hash: RpcHash; @@ -94,15 +97,15 @@ pub trait SendRpcResponse: Send + Sync { fn send_state(&self, hash: Self::Hash, state_encoded: Vec) -> DirectRpcResult<()>; } -/// determines if a given connection must be watched (i.e. kept alive), -/// based on the information in the RpcResponse -pub trait DetermineWatch { +/// Determines if a given connection must be watched (i.e. kept alive), +/// based on the information in the RpcResponse. +pub trait DetermineWatch: Send + Sync { type Hash: RpcHash; fn must_be_watched(&self, rpc_response: &RpcResponse) -> DirectRpcResult>; } -/// convenience method to create a do_watch extractor +/// Convenience method to create a do_watch extractor. pub fn create_determine_watch() -> RpcWatchExtractor where Hash: RpcHash, diff --git a/core/direct-rpc-server/src/mocks/connection_mock.rs b/core/direct-rpc-server/src/mocks/connection_mock.rs deleted file mode 100644 index 3d0cc46d36..0000000000 --- a/core/direct-rpc-server/src/mocks/connection_mock.rs +++ /dev/null @@ -1,108 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- -*/ - -use crate::mocks::updates_sink::UpdatesSink; -use itc_tls_websocket_server::{WebSocketConnection, WebSocketResult}; -use std::{string::String, sync::Arc}; - -pub struct ConnectionMock { - name: String, - input: Option, - maybe_updates_sink: Option>, - is_closed: bool, -} - -impl ConnectionMock { - pub fn builder() -> ConnectionMockBuilder { - ConnectionMockBuilder::new() - } - - pub fn name(&self) -> &String { - &self.name - } - - pub fn is_closed(&self) -> bool { - self.is_closed - } -} - -impl WebSocketConnection for ConnectionMock { - fn process_request(&mut self, initial_call: F) -> WebSocketResult - where - F: Fn(&str) -> String, - { - match &self.input { - Some(i) => Ok((initial_call)(i.as_str())), - None => Ok("processed".to_string()), - } - } - - fn send_update(&mut self, message: &str) -> WebSocketResult<()> { - if let Some(updates_sink) = self.maybe_updates_sink.as_ref() { - updates_sink.push_update(String::from(message)); - } - Ok(()) - } - - fn close(&mut self) { - self.is_closed = true; - } -} - -/// builder pattern for the connection mock -pub struct ConnectionMockBuilder { - maybe_name: Option, - maybe_input: Option, - maybe_is_closed: Option, - maybe_updates_sink: Option>, -} - -impl ConnectionMockBuilder { - /// use with ConnectionMock::builder() - fn new() -> Self { - ConnectionMockBuilder { - maybe_name: None, - maybe_input: None, - maybe_is_closed: None, - maybe_updates_sink: None, - } - } - - pub fn with_name(mut self, name: &str) -> Self { - self.maybe_name = Some(String::from(name)); - self - } - - pub fn with_input(mut self, input: &str) -> Self { - self.maybe_input = Some(String::from(input)); - self - } - - pub fn with_updates_sink(mut self, updates_sink: Arc) -> Self { - self.maybe_updates_sink = Some(updates_sink); - self - } - - pub fn build(self) -> ConnectionMock { - let name = self.maybe_name.unwrap_or("blank".to_string()); - let input = self.maybe_input; - let is_closed = self.maybe_is_closed.unwrap_or(false); - let updates_sink = self.maybe_updates_sink; - - ConnectionMock { name, input, maybe_updates_sink: updates_sink, is_closed } - } -} diff --git a/core/direct-rpc-server/src/mocks/mod.rs b/core/direct-rpc-server/src/mocks/mod.rs index 4a9e3b130e..8215f9c583 100644 --- a/core/direct-rpc-server/src/mocks/mod.rs +++ b/core/direct-rpc-server/src/mocks/mod.rs @@ -15,6 +15,5 @@ */ -pub mod connection_mock; pub mod determine_watch_mock; -pub mod updates_sink; +pub mod response_channel_mock; diff --git a/core/direct-rpc-server/src/mocks/updates_sink.rs b/core/direct-rpc-server/src/mocks/response_channel_mock.rs similarity index 51% rename from core/direct-rpc-server/src/mocks/updates_sink.rs rename to core/direct-rpc-server/src/mocks/response_channel_mock.rs index 020cde0c1a..6a612d6766 100644 --- a/core/direct-rpc-server/src/mocks/updates_sink.rs +++ b/core/direct-rpc-server/src/mocks/response_channel_mock.rs @@ -15,32 +15,41 @@ */ -#[cfg(all(not(feature = "std"), feature = "sgx"))] +#[cfg(feature = "sgx")] use std::sync::SgxRwLock as RwLock; #[cfg(feature = "std")] use std::sync::RwLock; -use std::{string::String, vec::Vec}; +use crate::{response_channel::ResponseChannel, DirectRpcError}; +use std::vec::Vec; -/// Struct to store the updates sent through a `Connection`. 
-/// This allows to have tests know what update messages were sent, -/// even if the connection is closed or otherwise discarded (which happens inside handler logic) -pub struct UpdatesSink { - received_updates: RwLock>, +#[derive(Default)] +pub struct ResponseChannelMock +where + Token: Copy + Send + Sync, +{ + sent_messages: RwLock>, } -impl UpdatesSink { - pub fn new() -> Self { - UpdatesSink { received_updates: RwLock::new(Vec::new()) } +impl ResponseChannelMock +where + Token: Copy + Send + Sync, +{ + pub fn number_of_updates(&self) -> usize { + self.sent_messages.read().unwrap().len() } +} - pub fn push_update(&self, update: String) { - let mut updates = self.received_updates.write().unwrap(); - updates.push(update); - } +impl ResponseChannel for ResponseChannelMock +where + Token: Copy + Send + Sync, +{ + type Error = DirectRpcError; - pub fn number_of_updates(&self) -> usize { - self.received_updates.read().unwrap().len() + fn respond(&self, token: Token, message: String) -> Result<(), Self::Error> { + let mut messages_lock = self.sent_messages.write().unwrap(); + messages_lock.push((token, message)); + Ok(()) } } diff --git a/core/direct-rpc-server/src/response_channel.rs b/core/direct-rpc-server/src/response_channel.rs new file mode 100644 index 0000000000..b1fe6a3fea --- /dev/null +++ b/core/direct-rpc-server/src/response_channel.rs @@ -0,0 +1,26 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::DirectRpcError; +use std::string::String; + +/// Response / status update channel for an RPC call. 
+pub trait ResponseChannel: Send + Sync { + type Error: Into; + + fn respond(&self, token: Token, message: String) -> Result<(), Self::Error>; +} diff --git a/core/direct-rpc-server/src/rpc_connection_registry.rs b/core/direct-rpc-server/src/rpc_connection_registry.rs index a4bb9f198e..344278fba5 100644 --- a/core/direct-rpc-server/src/rpc_connection_registry.rs +++ b/core/direct-rpc-server/src/rpc_connection_registry.rs @@ -22,27 +22,23 @@ use std::sync::SgxRwLock as RwLock; use std::sync::RwLock; use crate::{RpcConnectionRegistry, RpcHash}; -use itc_tls_websocket_server::WebSocketConnection; use itp_types::RpcResponse; use std::collections::HashMap; type HashMapLock = RwLock>; -pub struct ConnectionRegistry +pub struct ConnectionRegistry where Hash: RpcHash, - Connection: WebSocketConnection, + Token: Copy + Send + Sync, { - connection_map: HashMapLock< - ::Hash, - (::Connection, RpcResponse), - >, + connection_map: HashMapLock<::Hash, (Token, RpcResponse)>, } -impl ConnectionRegistry +impl ConnectionRegistry where Hash: RpcHash, - Connection: WebSocketConnection, + Token: Copy + Send + Sync, { pub fn new() -> Self { Self::default() @@ -54,23 +50,23 @@ where } } -impl Default for ConnectionRegistry +impl Default for ConnectionRegistry where Hash: RpcHash, - Connection: WebSocketConnection, + Token: Copy + Send + Sync, { fn default() -> Self { ConnectionRegistry { connection_map: RwLock::new(HashMap::default()) } } } -impl RpcConnectionRegistry for ConnectionRegistry +impl RpcConnectionRegistry for ConnectionRegistry where Hash: RpcHash, - Connection: WebSocketConnection, + Token: Copy + Send + Sync, { type Hash = Hash; - type Connection = Connection; + type Connection = Token; fn store(&self, hash: Self::Hash, connection: Self::Connection, rpc_response: RpcResponse) { let mut map = self.connection_map.write().unwrap(); @@ -86,10 +82,8 @@ where #[cfg(test)] pub mod tests { use super::*; - use crate::mocks::connection_mock::ConnectionMock; - type TestConnection = ConnectionMock; - type TestRegistry = ConnectionRegistry; + type TestRegistry = ConnectionRegistry; #[test] pub fn adding_element_with_same_hash_overwrite() { @@ -97,19 +91,11 @@ pub mod tests { let hash = "first".to_string(); - registry.store( - hash.clone(), - ConnectionMock::builder().with_name("this_connection").build(), - dummy_rpc_response(), - ); - registry.store( - hash.clone(), - ConnectionMock::builder().with_name("other_connection").build(), - dummy_rpc_response(), - ); - - let connection = registry.withdraw(&hash).unwrap().0; - assert_eq!("other_connection".to_string(), *connection.name()); + registry.store(hash.clone(), 1, dummy_rpc_response()); + registry.store(hash.clone(), 2, dummy_rpc_response()); + + let connection_token = registry.withdraw(&hash).unwrap().0; + assert_eq!(2, connection_token); } #[test] @@ -124,7 +110,7 @@ pub mod tests { let registry = TestRegistry::new(); let hash = "first".to_string(); - registry.store(hash.clone(), ConnectionMock::builder().build(), dummy_rpc_response()); + registry.store(hash.clone(), 1, dummy_rpc_response()); let connection = registry.withdraw(&hash); diff --git a/core/direct-rpc-server/src/rpc_responder.rs b/core/direct-rpc-server/src/rpc_responder.rs index 4f85ad53ba..e04b4560a4 100644 --- a/core/direct-rpc-server/src/rpc_responder.rs +++ b/core/direct-rpc-server/src/rpc_responder.rs @@ -15,36 +15,56 @@ */ -use crate::{DirectRpcError, DirectRpcResult, RpcConnectionRegistry, RpcHash, SendRpcResponse}; +use crate::{ + response_channel::ResponseChannel, DirectRpcError, 
DirectRpcResult, RpcConnectionRegistry, + RpcHash, SendRpcResponse, +}; use codec::{Decode, Encode}; -use itc_tls_websocket_server::WebSocketConnection; use itp_types::{DirectRequestStatus, RpcResponse, RpcReturnValue, TrustedOperationStatus}; use log::*; use std::{sync::Arc, vec::Vec}; -pub struct RpcResponder +pub struct RpcResponder where - Registry: RpcConnectionRegistry, + Registry: RpcConnectionRegistry, Hash: RpcHash, + ResponseChannelType: ResponseChannel, { connection_registry: Arc, + response_channel: Arc, } -impl RpcResponder +impl RpcResponder where - Registry: RpcConnectionRegistry, + Registry: RpcConnectionRegistry, Hash: RpcHash, + ResponseChannelType: ResponseChannel, { - pub fn new(connection_registry: Arc) -> Self { - RpcResponder { connection_registry } + pub fn new( + connection_registry: Arc, + web_socket_responder: Arc, + ) -> Self { + RpcResponder { connection_registry, response_channel: web_socket_responder } + } + + fn encode_and_send_response( + &self, + connection: Registry::Connection, + rpc_response: &RpcResponse, + ) -> DirectRpcResult<()> { + let string_response = + serde_json::to_string(&rpc_response).map_err(DirectRpcError::SerializationError)?; + + self.response_channel.respond(connection, string_response).map_err(|e| e.into()) } } -impl SendRpcResponse for RpcResponder +impl SendRpcResponse + for RpcResponder where - Registry: RpcConnectionRegistry, + Registry: RpcConnectionRegistry, Hash: RpcHash, - Connection: WebSocketConnection, + ResponseChannelType: ResponseChannel, { type Hash = Hash; @@ -56,7 +76,7 @@ where debug!("updating status event"); // withdraw removes it from the registry - let (mut connection, rpc_response) = self + let (connection_token, rpc_response) = self .connection_registry .withdraw(&hash) .ok_or(DirectRpcError::InvalidConnectionHash)?; @@ -73,13 +93,10 @@ where result.status = DirectRequestStatus::TrustedOperationStatus(status_update); new_response.result = result.encode(); - encode_and_send_response(&mut connection, &new_response)?; + self.encode_and_send_response(connection_token, &new_response)?; if do_watch { - self.connection_registry.store(hash, connection, new_response); - } else { - debug!("closing connection"); - connection.close(); + self.connection_registry.store(hash, connection_token, new_response); } debug!("updating status event successful"); @@ -90,7 +107,7 @@ where debug!("sending state"); // withdraw removes it from the registry - let (mut connection, mut response) = self + let (connection_token, mut response) = self .connection_registry .withdraw(&hash) .ok_or(DirectRpcError::InvalidConnectionHash)?; @@ -104,28 +121,15 @@ where // update response response.result = result.encode(); - encode_and_send_response(&mut connection, &response)?; + self.encode_and_send_response(connection_token, &response)?; - debug!("closing connection"); - connection.close(); + self.connection_registry.store(hash, connection_token, response); debug!("sending state successful"); Ok(()) } } -fn encode_and_send_response( - connection: &mut Connection, - rpc_response: &RpcResponse, -) -> DirectRpcResult<()> { - let string_response = - serde_json::to_string(&rpc_response).map_err(DirectRpcError::SerializationError)?; - - connection - .send_update(string_response.as_str()) - .map_err(DirectRpcError::WebSocketError) -} - fn continue_watching(status: &TrustedOperationStatus) -> bool { !matches!( status, @@ -142,18 +146,20 @@ pub mod tests { use super::*; use crate::{ builders::rpc_response_builder::RpcResponseBuilder, - 
mocks::{connection_mock::ConnectionMock, updates_sink::UpdatesSink}, + mocks::response_channel_mock::ResponseChannelMock, rpc_connection_registry::ConnectionRegistry, }; - use core::assert_matches::assert_matches; + use std::assert_matches::assert_matches; - type TestConnection = ConnectionMock; - type TestConnectionRegistry = ConnectionRegistry; + type TestConnectionToken = u64; + type TestResponseChannel = ResponseChannelMock; + type TestConnectionRegistry = ConnectionRegistry; #[test] fn given_empty_registry_when_updating_status_event_then_return_error() { - let connection_registry = Arc::new(ConnectionRegistry::::new()); - let rpc_responder = RpcResponder::new(connection_registry); + let connection_registry = Arc::new(TestConnectionRegistry::new()); + let websocket_responder = Arc::new(TestResponseChannel::default()); + let rpc_responder = RpcResponder::new(connection_registry, websocket_responder); assert_matches!( rpc_responder @@ -164,8 +170,9 @@ pub mod tests { #[test] fn given_empty_registry_when_sending_state_then_return_error() { - let connection_registry = Arc::new(ConnectionRegistry::::new()); - let rpc_responder = RpcResponder::new(connection_registry); + let connection_registry = Arc::new(TestConnectionRegistry::new()); + let websocket_responder = Arc::new(TestResponseChannel::default()); + let rpc_responder = RpcResponder::new(connection_registry, websocket_responder); assert_matches!( rpc_responder.send_state("hash".to_string(), vec![1u8, 2u8]), @@ -176,26 +183,29 @@ pub mod tests { #[test] fn updating_status_event_with_finalized_state_removes_connection() { let connection_hash = String::from("conn_hash"); - let (connection_registry, updates_sink) = - create_registry_with_single_connection(connection_hash.clone()); + let connection_registry = create_registry_with_single_connection(connection_hash.clone()); - let rpc_responder = RpcResponder::new(connection_registry.clone()); + let websocket_responder = Arc::new(TestResponseChannel::default()); + let rpc_responder = + RpcResponder::new(connection_registry.clone(), websocket_responder.clone()); let result = rpc_responder .update_status_event(connection_hash.clone(), TrustedOperationStatus::Finalized); assert!(result.is_ok()); - assert!(connection_registry.withdraw(&connection_hash).is_none()); - assert_eq!(1, updates_sink.number_of_updates()); + + verify_closed_connection(&connection_hash, connection_registry); + assert_eq!(1, websocket_responder.number_of_updates()); } #[test] fn updating_status_event_with_ready_state_keeps_connection_and_sends_update() { let connection_hash = String::from("conn_hash"); - let (connection_registry, updates_sink) = - create_registry_with_single_connection(connection_hash.clone()); + let connection_registry = create_registry_with_single_connection(connection_hash.clone()); - let rpc_responder = RpcResponder::new(connection_registry.clone()); + let websocket_responder = Arc::new(TestResponseChannel::default()); + let rpc_responder = + RpcResponder::new(connection_registry.clone(), websocket_responder.clone()); let first_result = rpc_responder .update_status_event(connection_hash.clone(), TrustedOperationStatus::Ready); @@ -207,41 +217,42 @@ pub mod tests { assert!(second_result.is_ok()); verify_open_connection(&connection_hash, connection_registry); - assert_eq!(2, updates_sink.number_of_updates()); + assert_eq!(2, websocket_responder.number_of_updates()); } #[test] - fn sending_state_successfully_sends_update_and_closes_connection() { + fn 
sending_state_successfully_sends_update_and_keeps_connection_open() { let connection_hash = String::from("conn_hash"); - let (connection_registry, updates_sink) = - create_registry_with_single_connection(connection_hash.clone()); + let connection_registry = create_registry_with_single_connection(connection_hash.clone()); - let rpc_responder = RpcResponder::new(connection_registry.clone()); + let websocket_responder = Arc::new(TestResponseChannel::default()); + let rpc_responder = + RpcResponder::new(connection_registry.clone(), websocket_responder.clone()); let result = rpc_responder.send_state(connection_hash.clone(), "new_state".encode()); assert!(result.is_ok()); - verify_closed_connection(&connection_hash, connection_registry); - assert_eq!(1, updates_sink.number_of_updates()); + verify_open_connection(&connection_hash, connection_registry); + assert_eq!(1, websocket_responder.number_of_updates()); } #[test] - fn sending_state_twice_fails_the_second_time() { + fn sending_state_twice_works() { let connection_hash = String::from("conn_hash"); - let (connection_registry, updates_sink) = - create_registry_with_single_connection(connection_hash.clone()); + let connection_registry = create_registry_with_single_connection(connection_hash.clone()); - let rpc_responder = RpcResponder::new(connection_registry.clone()); + let websocket_responder = Arc::new(TestResponseChannel::default()); + let rpc_responder = + RpcResponder::new(connection_registry.clone(), websocket_responder.clone()); let first_result = rpc_responder.send_state(connection_hash.clone(), "new_state".encode()); assert!(first_result.is_ok()); - // cannot send_state twice, since it closes the connection automatically after the first send let second_result = rpc_responder.send_state(connection_hash.clone(), "new_state_2".encode()); - assert!(!second_result.is_ok()); + assert!(second_result.is_ok()); - assert_eq!(1, updates_sink.number_of_updates()); + assert_eq!(2, websocket_responder.number_of_updates()); } #[test] @@ -258,11 +269,7 @@ pub mod tests { connection_registry: Arc, ) { let maybe_connection = connection_registry.withdraw(&connection_hash); - assert!(maybe_connection.is_some()); - let connection = maybe_connection.unwrap().0; - - assert_eq!(false, connection.is_closed()); } fn verify_closed_connection( @@ -274,14 +281,11 @@ pub mod tests { fn create_registry_with_single_connection( connection_hash: String, - ) -> (Arc, Arc) { - let connection_registry = ConnectionRegistry::::new(); - let updates_sink = Arc::new(UpdatesSink::new()); - - let connection = TestConnection::builder().with_updates_sink(updates_sink.clone()).build(); + ) -> Arc { + let connection_registry = TestConnectionRegistry::new(); let rpc_response = RpcResponseBuilder::new().with_id(2).build(); - connection_registry.store(connection_hash.clone(), connection, rpc_response); - (Arc::new(connection_registry), updates_sink) + connection_registry.store(connection_hash.clone(), 1, rpc_response); + Arc::new(connection_registry) } } diff --git a/core/direct-rpc-server/src/rpc_ws_handler.rs b/core/direct-rpc-server/src/rpc_ws_handler.rs index 2ba2b90eae..db658ef6b4 100644 --- a/core/direct-rpc-server/src/rpc_ws_handler.rs +++ b/core/direct-rpc-server/src/rpc_ws_handler.rs @@ -18,35 +18,31 @@ #[cfg(all(not(feature = "std"), feature = "sgx"))] use crate::sgx_reexport_prelude::*; -extern crate alloc; - use crate::{DetermineWatch, DirectRpcError, RpcConnectionRegistry, RpcHash}; -use alloc::boxed::Box; use itc_tls_websocket_server::{ - WebSocketConnection, 
WebSocketError, WebSocketHandler, WebSocketResult, + error::{WebSocketError, WebSocketResult}, + ConnectionToken, WebSocketMessageHandler, }; use jsonrpc_core::IoHandler; use log::*; -use std::sync::Arc; +use std::{boxed::Box, string::String, sync::Arc}; -pub struct RpcWsHandler +pub struct RpcWsHandler where Watcher: DetermineWatch, - Registry: RpcConnectionRegistry, + Registry: RpcConnectionRegistry, Hash: RpcHash, - Connection: WebSocketConnection, { rpc_io_handler: IoHandler, connection_watcher: Arc, connection_registry: Arc, } -impl RpcWsHandler +impl RpcWsHandler where Watcher: DetermineWatch, - Registry: RpcConnectionRegistry, + Registry: RpcConnectionRegistry, Hash: RpcHash, - Connection: WebSocketConnection, { pub fn new( rpc_io_handler: IoHandler, @@ -57,37 +53,40 @@ where } } -impl WebSocketHandler - for RpcWsHandler +impl WebSocketMessageHandler for RpcWsHandler where Watcher: DetermineWatch, - Registry: RpcConnectionRegistry, + Registry: RpcConnectionRegistry, + Registry::Connection: From, Hash: RpcHash, - Connection: WebSocketConnection, { - type Connection = Connection; - - fn handle(&self, mut connection: Connection) -> WebSocketResult<()> { - let rpc_response_string = connection.process_request(|request| { - self.rpc_io_handler.handle_request_sync(request).unwrap_or_default() - })?; + fn handle_message( + &self, + connection_token: ConnectionToken, + message: String, + ) -> WebSocketResult> { + let maybe_rpc_response = self.rpc_io_handler.handle_request_sync(message.as_str()); - debug!("RPC response string: {}", rpc_response_string); + debug!("RPC response string: {:?}", maybe_rpc_response); - let rpc_response = serde_json::from_str(&rpc_response_string).map_err(|e| { - WebSocketError::HandlerError(Box::new(DirectRpcError::SerializationError(e))) - })?; + let rpc_response = serde_json::from_str( + maybe_rpc_response.clone().unwrap_or_default().as_str(), + ) + .map_err(|e| WebSocketError::Other(Box::new(DirectRpcError::SerializationError(e))))?; match self.connection_watcher.must_be_watched(&rpc_response) { - Ok(maybe_connection_hash) => { + Ok(maybe_connection_hash) => if let Some(connection_hash) = maybe_connection_hash { - debug!("current connection is kept alive"); - self.connection_registry.store(connection_hash, connection, rpc_response); - } - Ok(()) - }, - Err(e) => Err(WebSocketError::HandlerError(Box::new(e))), + self.connection_registry.store( + connection_hash, + connection_token.into(), + rpc_response, + ); + }, + Err(e) => return Err(WebSocketError::Other(Box::new(e))), } + + Ok(maybe_rpc_response) } } @@ -96,20 +95,19 @@ pub mod tests { use super::*; use crate::{ - mocks::{connection_mock::ConnectionMock, determine_watch_mock::DetermineWatchMock}, + mocks::determine_watch_mock::DetermineWatchMock, rpc_connection_registry::ConnectionRegistry, }; use codec::Encode; use core::assert_matches::assert_matches; + use itc_tls_websocket_server::ConnectionToken; use itp_types::{DirectRequestStatus, RpcReturnValue}; use jsonrpc_core::Params; use serde_json::json; - type TestConnection = ConnectionMock; - type TestConnectionRegistry = ConnectionRegistry; + type TestConnectionRegistry = ConnectionRegistry; type TestConnectionWatcher = DetermineWatchMock; - type TestWsHandler = - RpcWsHandler; + type TestWsHandler = RpcWsHandler; const RPC_METHOD_NAME: &str = "test_call"; @@ -117,11 +115,11 @@ pub mod tests { fn valid_rpc_call_without_watch_runs_successfully() { let io_handler = create_io_handler_with_method(RPC_METHOD_NAME); - let connection = 
create_connection(RPC_METHOD_NAME); + let (connection_token, message) = create_message_to_handle(RPC_METHOD_NAME); let (ws_handler, connection_registry) = create_ws_handler(io_handler, None); - let handle_result = ws_handler.handle(connection); + let handle_result = ws_handler.handle_message(connection_token, message); assert!(handle_result.is_ok()); assert!(connection_registry.is_empty()); @@ -132,12 +130,12 @@ pub mod tests { let io_handler = create_io_handler_with_method(RPC_METHOD_NAME); let connection_hash = String::from("connection_hash"); - let connection = create_connection(RPC_METHOD_NAME); + let (connection_token, message) = create_message_to_handle(RPC_METHOD_NAME); let (ws_handler, connection_registry) = create_ws_handler(io_handler, Some(connection_hash.clone())); - let handle_result = ws_handler.handle(connection); + let handle_result = ws_handler.handle_message(connection_token, message); assert!(handle_result.is_ok()); assert!(connection_registry.withdraw(&connection_hash).is_some()); @@ -148,12 +146,12 @@ pub mod tests { let io_handler = create_io_handler_with_error(RPC_METHOD_NAME); let connection_hash = String::from("connection_hash"); - let connection = create_connection(RPC_METHOD_NAME); + let (connection_token, message) = create_message_to_handle(RPC_METHOD_NAME); let (ws_handler, connection_registry) = create_ws_handler(io_handler, Some(connection_hash.clone())); - let handle_result = ws_handler.handle(connection); + let handle_result = ws_handler.handle_message(connection_token, message); assert!(handle_result.is_ok()); assert!(connection_registry.withdraw(&connection_hash).is_some()); @@ -162,24 +160,24 @@ pub mod tests { #[test] fn when_rpc_method_does_not_match_anything_return_error() { let io_handler = create_io_handler_with_error(RPC_METHOD_NAME); - let connection = create_connection("not_a_valid_method"); + let (connection_token, message) = create_message_to_handle("not_a_valid_method"); let (ws_handler, connection_registry) = create_ws_handler(io_handler, None); - let handle_result = ws_handler.handle(connection); + let handle_result = ws_handler.handle_message(connection_token, message); - assert_matches!(handle_result, Err(WebSocketError::HandlerError(_))); + assert_matches!(handle_result, Err(WebSocketError::Other(_))); assert!(connection_registry.is_empty()); } - fn create_connection(method_name: &str) -> ConnectionMock { + fn create_message_to_handle(method_name: &str) -> (ConnectionToken, String) { let json_rpc_pre_method = r#"{"jsonrpc": "2.0", "method": ""#; let json_rpc_post_method = r#"", "params": {}, "id": 1}"#; let json_string = format!("{}{}{}", json_rpc_pre_method, method_name, json_rpc_post_method); debug!("JSON input: {}", json_string); - TestConnection::builder().with_input(json_string.as_str()).build() + (ConnectionToken(23), json_string) } fn create_ws_handler( @@ -191,7 +189,7 @@ pub mod tests { None => TestConnectionWatcher::no_watch(), }; - let connection_registry = Arc::new(ConnectionRegistry::::new()); + let connection_registry = Arc::new(TestConnectionRegistry::new()); ( TestWsHandler::new(io_handler, Arc::new(watcher), connection_registry.clone()), diff --git a/core/parentchain/block-import-dispatcher/Cargo.toml b/core/parentchain/block-import-dispatcher/Cargo.toml index 7c909762a0..5aa3d8d333 100644 --- a/core/parentchain/block-import-dispatcher/Cargo.toml +++ b/core/parentchain/block-import-dispatcher/Cargo.toml @@ -22,7 +22,7 @@ thiserror = { version = "1.0", optional = true } # crates.io no-std compatible libraries log = { 
version = "0.4", default-features = false } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [dev-dependencies] itp-types = { path = "../../../core-primitives/types" } diff --git a/core/parentchain/block-import-dispatcher/src/triggered_dispatcher.rs b/core/parentchain/block-import-dispatcher/src/triggered_dispatcher.rs index 3185315905..3f1f5d34c2 100644 --- a/core/parentchain/block-import-dispatcher/src/triggered_dispatcher.rs +++ b/core/parentchain/block-import-dispatcher/src/triggered_dispatcher.rs @@ -103,6 +103,8 @@ where let latest_imported_block = blocks_to_import.last().map(|b| (*b).clone()); + debug!("Trigger import of all parentchain blocks in queue ({})", blocks_to_import.len()); + self.block_importer .import_parentchain_blocks(blocks_to_import) .map_err(Error::BlockImport)?; @@ -114,6 +116,11 @@ where let blocks_to_import = self.import_queue.pop_all_but_last().map_err(Error::BlockImportQueue)?; + debug!( + "Trigger import of all parentchain blocks, except the latest, from queue ({})", + blocks_to_import.len() + ); + self.block_importer .import_parentchain_blocks(blocks_to_import) .map_err(Error::BlockImport) diff --git a/core/parentchain/block-importer/Cargo.toml b/core/parentchain/block-importer/Cargo.toml index f22110d3c1..b31a145618 100644 --- a/core/parentchain/block-importer/Cargo.toml +++ b/core/parentchain/block-importer/Cargo.toml @@ -20,7 +20,6 @@ itp-registry-storage = { path = "../../../core-primitives/registry-storage", def itp-settings = { path = "../../../core-primitives/settings" } itp-stf-executor = { path = "../../../core-primitives/stf-executor", default-features = false } itp-stf-state-handler = { path = "../../../core-primitives/stf-state-handler", default-features = false } -itp-storage-verifier = { path = "../../../core-primitives/storage-verified", default-features = false } itp-types = { path = "../../../core-primitives/types", default-features = false } # sgx enabled external libraries @@ -30,14 +29,14 @@ thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linu thiserror = { version = "1.0", optional = true } # crates.io no-std compatible libraries -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -#beefy-merkle-tree = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master", features = "keccak" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +#beefy-merkle-tree = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19", features = "keccak" } #remove as soon as we can import beefy-merkle-tree: tiny-keccak = { version = "2.0.2", features = ["keccak"] } -pallet-ajuna-gameregistry = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } +pallet-ajuna-gameregistry = 
{ default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } [features] default = ["std"] @@ -52,7 +51,6 @@ std = [ "itp-stf-executor/std", "itp-stf-state-handler/std", "pallet-ajuna-gameregistry/std", - "itp-storage-verifier/std", "itp-types/std", # no-std compatible libraries "codec/std", @@ -71,7 +69,6 @@ sgx = [ "itp-extrinsics-factory/sgx", "itp-stf-executor/sgx", "itp-stf-state-handler/sgx", - "itp-storage-verifier/sgx", "itp-types/sgx", # sgx enabled external libraries "thiserror_sgx", diff --git a/core/parentchain/block-importer/src/block_importer.rs b/core/parentchain/block-importer/src/block_importer.rs index 792d78df54..f7ad7d849b 100644 --- a/core/parentchain/block-importer/src/block_importer.rs +++ b/core/parentchain/block-importer/src/block_importer.rs @@ -17,14 +17,11 @@ //! Imports parentchain blocks and executes any indirect calls found in the extrinsics. -use log::*; -use pallet_ajuna_gameregistry::{game::GameEngine, Queue}; -use sp_runtime::{ - generic::SignedBlock as SignedBlockG, - traits::{Block as ParentchainBlockTrait, NumberFor}, +use crate::{ + beefy_merkle_tree::{merkle_root, Keccak256}, + error::{Error, Result}, + ImportParentchainBlocks, }; -use std::{marker::PhantomData, sync::Arc, vec, vec::Vec}; - use ita_stf::ParentchainHeader; use itc_parentchain_indirect_calls_executor::ExecuteIndirectCalls; use itc_parentchain_light_client::{ @@ -38,14 +35,14 @@ use itp_settings::node::{ }; use itp_stf_executor::traits::{StfExecuteShieldFunds, StfExecuteTrustedCall, StfUpdateState}; use itp_stf_state_handler::query_shard_state::QueryShardState; -use itp_storage_verifier::GetStorageVerified; use itp_types::{OpaqueCall, H256}; - -use crate::{ - beefy_merkle_tree::{merkle_root, Keccak256}, - error::Result, - ImportParentchainBlocks, +use log::*; +use pallet_ajuna_gameregistry::{game::GameEngine, Queue}; +use sp_runtime::{ + generic::SignedBlock as SignedBlockG, + traits::{Block as ParentchainBlockTrait, NumberFor}, }; +use std::{format, marker::PhantomData, sync::Arc, vec::Vec}; /// Parentchain block import implementation. pub struct ParentchainBlockImporter< @@ -143,7 +140,7 @@ impl< ParentchainBlock: ParentchainBlockTrait, NumberFor: BlockNumberOps, ValidatorAccessor: ValidatorAccess, - OCallApi: EnclaveOnChainOCallApi + EnclaveAttestationOCallApi + GetStorageVerified, + OCallApi: EnclaveOnChainOCallApi + EnclaveAttestationOCallApi, StfExecutor: StfUpdateState + StfExecuteTrustedCall + StfExecuteShieldFunds, ExtrinsicsFactory: CreateExtrinsics, IndirectCallsExecutor: ExecuteIndirectCalls, @@ -197,12 +194,12 @@ impl< // FIXME: Putting these blocks below in a separate function would be a little bit cleaner let maybe_queue: Option> = self .ocall_api - .get_storage_verified(RegistryStorage::queue_game(), block.header())? + .get_storage_verified(RegistryStorage::queue_game(), block.header()) + .map_err(|e| Error::StorageVerified(format!("{:?}", e)))? .into_tuple() .1; match maybe_queue { Some(mut queue) => { - //FIXME: if this would be a separate function, we could return here upon if queue.is_empty() check. if !queue.is_empty() { //FIXME: hardcoded, because currently hardcoded in the GameRegistry pallet. let game_engine = GameEngine::new(1u8, 1u8); @@ -212,22 +209,14 @@ impl< } //FIXME: we currently only take the first shard. How we handle sharding in general? 
let shard = self.file_state_handler.list_shards()?[0]; - let opaque_call = OpaqueCall::from_tuple(&( + let ack_game_call = OpaqueCall::from_tuple(&( [GAME_REGISTRY_MODULE, ACK_GAME], &game_engine, games, shard, )); - let calls = vec![opaque_call]; - - // Create extrinsic for acknowledge game. - let ack_game_extrinsic = - self.extrinsics_factory.create_extrinsics(calls.as_slice())?; - // Sending the extrinsic requires mut access because the validator caches the sent extrinsics internally. - self.validator_accessor.execute_mut_on_validator(|v| { - v.send_extrinsics(self.ocall_api.as_ref(), ack_game_extrinsic) - })?; + calls.push(ack_game_call); } }, None => { diff --git a/core/parentchain/block-importer/src/error.rs b/core/parentchain/block-importer/src/error.rs index e4a25d93b4..89c054af61 100644 --- a/core/parentchain/block-importer/src/error.rs +++ b/core/parentchain/block-importer/src/error.rs @@ -19,7 +19,7 @@ use crate::sgx_reexport_prelude::*; use sgx_types::sgx_status_t; -use std::{boxed::Box, format}; +use std::{boxed::Box, format, string::String}; pub type Result = core::result::Result; @@ -35,7 +35,7 @@ pub enum Error { #[error("Light-client error: {0}")] LightClient(#[from] itc_parentchain_light_client::error::Error), #[error("Storage verified error: {0}")] - StorageVerified(#[from] itp_storage_verifier::Error), + StorageVerified(String), #[error("State handling error: {0}")] StateHandler(#[from] itp_stf_state_handler::error::Error), #[error(transparent)] diff --git a/core/parentchain/indirect-calls-executor/Cargo.toml b/core/parentchain/indirect-calls-executor/Cargo.toml index 9111ee31a7..e3972ff434 100644 --- a/core/parentchain/indirect-calls-executor/Cargo.toml +++ b/core/parentchain/indirect-calls-executor/Cargo.toml @@ -27,13 +27,13 @@ thiserror = { version = "1.0", optional = true } # no-std compatible libraries bs58 = { version = "0.4.0", default-features = false, features = ["alloc"] } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # scs/integritee -substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "master", default-features = false } +substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19", default-features = false } [features] default = ["std"] diff --git a/core/parentchain/indirect-calls-executor/src/indirect_calls_executor.rs b/core/parentchain/indirect-calls-executor/src/indirect_calls_executor.rs index 692cca2ac2..570b951372 100644 --- a/core/parentchain/indirect-calls-executor/src/indirect_calls_executor.rs +++ b/core/parentchain/indirect-calls-executor/src/indirect_calls_executor.rs @@ -89,7 +89,7 @@ where info!("found {:?} games", games.len()); for game in 
games { - self.stf_executor.execute_new_game(game.clone(), shard, block)?; + self.stf_executor.execute_new_game(*game, shard, block)?; } Ok(()) } diff --git a/core/parentchain/light-client/Cargo.toml b/core/parentchain/light-client/Cargo.toml index bcbc9e87bb..519146acff 100644 --- a/core/parentchain/light-client/Cargo.toml +++ b/core/parentchain/light-client/Cargo.toml @@ -38,9 +38,9 @@ sgx = [ mocks = [] [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive", "chain-error"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive", "chain-error"] } derive_more = { version = "0.99.5" } -finality-grandpa = { version = "0.14.3", default-features = false, features = ["derive-codec"] } +finality-grandpa = { version = "0.15.0", default-features = false, features = ["derive-codec"] } hash-db = { version = "0.15.2", default-features = false } lazy_static = { version = "1.1.0", features = ["spin_no_std"] } log = { version = "0.4.14", default-features = false } @@ -60,9 +60,9 @@ itp-storage = { path = "../../../core-primitives/storage", default-features = fa itp-types = { path = "../../../core-primitives/types", default-features = false } # substrate deps -frame-system = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-application-crypto = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-trie = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } +frame-system = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-application-crypto = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-trie = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } diff --git a/core/parentchain/light-client/src/concurrent_access.rs b/core/parentchain/light-client/src/concurrent_access.rs index 8b6eb7b187..6efa925a6d 100644 --- a/core/parentchain/light-client/src/concurrent_access.rs +++ b/core/parentchain/light-client/src/concurrent_access.rs @@ -29,7 +29,7 @@ use crate::{ LightClientState, Validator as ValidatorTrait, }; use finality_grandpa::BlockNumberOps; -use itp_sgx_io::SealedIO; +use 
itp_sgx_io::StaticSealedIO; use lazy_static::lazy_static; use sp_runtime::traits::{Block as ParentchainBlockTrait, NumberFor}; use std::marker::PhantomData; @@ -69,7 +69,7 @@ where pub struct GlobalValidatorAccessor where Validator: ValidatorTrait + LightClientState, - Seal: SealedIO, + Seal: StaticSealedIO, ParentchainBlock: ParentchainBlockTrait, NumberFor: BlockNumberOps, { @@ -80,7 +80,7 @@ impl Default for GlobalValidatorAccessor where Validator: ValidatorTrait + LightClientState, - Seal: SealedIO, + Seal: StaticSealedIO, ParentchainBlock: ParentchainBlockTrait, NumberFor: BlockNumberOps, { @@ -92,7 +92,7 @@ where impl GlobalValidatorAccessor where Validator: ValidatorTrait + LightClientState, - Seal: SealedIO, + Seal: StaticSealedIO, ParentchainBlock: ParentchainBlockTrait, NumberFor: BlockNumberOps, { @@ -105,7 +105,7 @@ impl ValidatorAccess for GlobalValidatorAccessor where Validator: ValidatorTrait + LightClientState, - Seal: SealedIO, + Seal: StaticSealedIO, ParentchainBlock: ParentchainBlockTrait, NumberFor: BlockNumberOps, { @@ -116,7 +116,7 @@ where F: FnOnce(&Self::ValidatorType) -> Result, { let _read_lock = VALIDATOR_LOCK.read().map_err(|_| Error::PoisonedLock)?; - let validator = Seal::unseal()?; + let validator = Seal::unseal_from_static_file()?; getter_function(&validator) } @@ -125,9 +125,9 @@ where F: FnOnce(&mut Self::ValidatorType) -> Result, { let _write_lock = VALIDATOR_LOCK.write().map_err(|_| Error::PoisonedLock)?; - let mut validator = Seal::unseal()?; + let mut validator = Seal::unseal_from_static_file()?; let result = mutating_function(&mut validator); - Seal::seal(validator)?; + Seal::seal_to_static_file(validator)?; result } } diff --git a/core/parentchain/light-client/src/io.rs b/core/parentchain/light-client/src/io.rs index b8c2052cee..74850758d2 100644 --- a/core/parentchain/light-client/src/io.rs +++ b/core/parentchain/light-client/src/io.rs @@ -19,7 +19,7 @@ use crate::{error::Result, Error, LightClientState, LightValidation, NumberFor, use codec::{Decode, Encode}; use derive_more::Display; use itp_settings::files::LIGHT_CLIENT_DB; -use itp_sgx_io::{seal, unseal, SealedIO}; +use itp_sgx_io::{seal, unseal, StaticSealedIO}; use itp_storage::StorageProof; use log::*; use sp_finality_grandpa::VersionedAuthorityList; @@ -31,15 +31,15 @@ pub struct LightClientSeal { _phantom: B, } -impl SealedIO for LightClientSeal { +impl StaticSealedIO for LightClientSeal { type Error = Error; type Unsealed = LightValidation; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { Ok(unseal(LIGHT_CLIENT_DB).map(|b| Decode::decode(&mut b.as_slice()))??) 
} - fn seal(unsealed: Self::Unsealed) -> Result<()> { + fn seal_to_static_file(unsealed: Self::Unsealed) -> Result<()> { debug!("backup light client state"); if fs::copy(LIGHT_CLIENT_DB, format!("{}.1", LIGHT_CLIENT_DB)).is_err() { warn!("could not backup previous light client state"); @@ -62,7 +62,7 @@ where return init_validator::(header, auth, proof) } - let validator = LightClientSeal::::unseal()?; + let validator = LightClientSeal::::unseal_from_static_file()?; let genesis = validator.genesis_hash(validator.num_relays()).unwrap(); if genesis == header.hash() { @@ -85,7 +85,7 @@ where let mut validator = LightValidation::::new(); validator.initialize_relay(header, auth.into(), proof)?; - LightClientSeal::::seal(validator.clone())?; + LightClientSeal::::seal_to_static_file(validator.clone())?; Ok(validator.latest_finalized_header(validator.num_relays()).unwrap()) } diff --git a/core/parentchain/light-client/src/lib.rs b/core/parentchain/light-client/src/lib.rs index c41f26e163..0b7e0d22a5 100644 --- a/core/parentchain/light-client/src/lib.rs +++ b/core/parentchain/light-client/src/lib.rs @@ -57,13 +57,12 @@ pub mod state; #[cfg(all(not(feature = "std"), feature = "sgx"))] pub mod io; - -#[cfg(test)] -mod mocks; - #[cfg(feature = "mocks")] pub mod mocks; +#[cfg(all(not(feature = "mocks"), test))] +pub mod mocks; + pub type RelayId = u64; pub type AuthorityListRef<'a> = &'a [(AuthorityId, AuthorityWeight)]; diff --git a/core/parentchain/light-client/src/mocks/validator_mock_seal.rs b/core/parentchain/light-client/src/mocks/validator_mock_seal.rs index 03c7c27d82..7bf142cbfd 100644 --- a/core/parentchain/light-client/src/mocks/validator_mock_seal.rs +++ b/core/parentchain/light-client/src/mocks/validator_mock_seal.rs @@ -16,21 +16,21 @@ */ use crate::{error::Error, mocks::validator_mock::ValidatorMock}; -use itp_sgx_io::SealedIO; +use itp_sgx_io::StaticSealedIO; /// A seal that returns a mock validator. 
#[derive(Clone)] pub struct ValidatorMockSeal; -impl SealedIO for ValidatorMockSeal { +impl StaticSealedIO for ValidatorMockSeal { type Error = Error; type Unsealed = ValidatorMock; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { Ok(ValidatorMock) } - fn seal(_unsealed: Self::Unsealed) -> Result<(), Self::Error> { + fn seal_to_static_file(_unsealed: Self::Unsealed) -> Result<(), Self::Error> { Ok(()) } } diff --git a/core/rpc-client/Cargo.toml b/core/rpc-client/Cargo.toml index 347ce1eb23..a2bd7313db 100644 --- a/core/rpc-client/Cargo.toml +++ b/core/rpc-client/Cargo.toml @@ -6,16 +6,22 @@ edition = "2018" [dependencies] # crates.io -codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", features = ["derive"] } log = "0.4" openssl = { version = "0.10" } +parking_lot = "0.11.1" serde_derive = "1.0" serde_json = "1.0" sgx_crypto_helper = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } +substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } thiserror = { version = "1.0" } url = { version = "2.0.0" } ws = { version = "0.9.1", features = ["ssl"] } -substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "master" } # local itp-types = { path = "../../core-primitives/types" } + +[dev-dependencies] +env_logger = "0.9.0" +itc-tls-websocket-server = { path = "../tls-websocket-server", features = ["mocks"] } +rustls = { version = "0.19", features = ["dangerous_configuration"] } diff --git a/core/rpc-client/src/direct_client.rs b/core/rpc-client/src/direct_client.rs index a18409e63d..06f7e61fe4 100644 --- a/core/rpc-client/src/direct_client.rs +++ b/core/rpc-client/src/direct_client.rs @@ -17,13 +17,16 @@ //! Interface for direct access to a workers rpc. -use crate::ws_client::WsClient; +use crate::ws_client::{WsClient, WsClientControl}; use codec::Decode; use itp_types::{DirectRequestStatus, RpcRequest, RpcResponse, RpcReturnValue}; use log::*; use sgx_crypto_helper::rsa3072::Rsa3072PubKey; use std::{ - sync::mpsc::{channel, Sender as MpscSender}, + sync::{ + mpsc::{channel, Sender as MpscSender}, + Arc, + }, thread, thread::JoinHandle, }; @@ -34,6 +37,7 @@ pub use crate::error::{Error, Result}; #[derive(Clone)] pub struct DirectClient { url: String, + web_socket_control: Arc, } pub trait DirectApi { /// Server connection with only one response. @@ -44,11 +48,14 @@ pub trait DirectApi { fn get_mu_ra_url(&self) -> Result; fn get_untrusted_worker_url(&self) -> Result; fn get_state_metadata(&self) -> Result; + + /// Close any open websocket connection. + fn close(&self) -> Result<()>; } impl DirectClient { pub fn new(url: String) -> Self { - Self { url } + Self { url, web_socket_control: Default::default() } } } @@ -57,7 +64,7 @@ impl DirectApi for DirectClient { let (port_in, port_out) = channel(); info!("[WorkerApi Direct]: (get) Sending request: {:?}", request); - WsClient::connect(&self.url, request, &port_in, false)?; + WsClient::connect_one_shot(&self.url, request, &port_in)?; port_out.recv().map_err(Error::MspcReceiver) } @@ -65,8 +72,12 @@ impl DirectApi for DirectClient { info!("[WorkerApi Direct]: (watch) Sending request: {:?}", request); let url = self.url.clone(); + let web_socket_control = self.web_socket_control.clone(); // Unwrap is fine here, because JoinHandle can be used to handle a Thread panic. 
- thread::spawn(move || WsClient::connect(&url, &request, &sender, true).unwrap()) + thread::spawn(move || { + WsClient::connect_watch_with_control(&url, &request, &sender, web_socket_control) + .unwrap() + }) } fn get_rsa_pubkey(&self) -> Result { @@ -74,7 +85,7 @@ impl DirectApi for DirectClient { RpcRequest::compose_jsonrpc_call("author_getShieldingKey".to_string(), vec![]); // Send json rpc call to ws server. - let response_str = Self::get(self, &jsonrpc_call)?; + let response_str = self.get(&jsonrpc_call)?; let shielding_pubkey_string = decode_from_rpc_response(&response_str)?; let shielding_pubkey: Rsa3072PubKey = serde_json::from_str(&shielding_pubkey_string)?; @@ -88,7 +99,7 @@ impl DirectApi for DirectClient { RpcRequest::compose_jsonrpc_call("author_getMuRaUrl".to_string(), vec![]); // Send json rpc call to ws server. - let response_str = Self::get(self, &jsonrpc_call)?; + let response_str = self.get(&jsonrpc_call)?; let mu_ra_url: String = decode_from_rpc_response(&response_str)?; @@ -101,19 +112,20 @@ impl DirectApi for DirectClient { RpcRequest::compose_jsonrpc_call("author_getUntrustedUrl".to_string(), vec![]); // Send json rpc call to ws server. - let response_str = Self::get(self, &jsonrpc_call)?; + let response_str = self.get(&jsonrpc_call)?; let untrusted_url: String = decode_from_rpc_response(&response_str)?; info!("[+] Got untrusted websocket url of worker: {}", untrusted_url); Ok(untrusted_url) } + fn get_state_metadata(&self) -> Result { let jsonrpc_call: String = RpcRequest::compose_jsonrpc_call("state_getMetadata".to_string(), vec![]); // Send json rpc call to ws server. - let response_str = Self::get(self, &jsonrpc_call)?; + let response_str = self.get(&jsonrpc_call)?; //Decode rpc response let rpc_response: RpcResponse = serde_json::from_str(&response_str)?; @@ -125,6 +137,10 @@ impl DirectApi for DirectClient { println!("[+] Got metadata of enclave runtime"); Ok(metadata) } + + fn close(&self) -> Result<()> { + self.web_socket_control.close_connection() + } } fn decode_from_rpc_response(json_rpc_response: &str) -> Result { @@ -136,3 +152,84 @@ fn decode_from_rpc_response(json_rpc_response: &str) -> Result { _ => Err(Error::Status(response_message)), } } + +#[cfg(test)] +mod tests { + use super::*; + use itc_tls_websocket_server::{test::fixtures::test_server::create_server, WebSocketServer}; + use std::collections::VecDeque; + + #[test] + fn watch_works_and_closes_connection_on_demand() { + let _ = env_logger::builder().is_test(true).try_init(); + + const END_MESSAGE: &str = "End of service."; + let responses = VecDeque::from([END_MESSAGE.to_string()]); + + let port = 22334; + let (server, handler) = create_server(responses, port); + + let server_clone = server.clone(); + let server_join_handle = thread::spawn(move || server_clone.run()); + + // Wait until server is up. + thread::sleep(std::time::Duration::from_millis(50)); + + let client = DirectClient::new(format!("wss://localhost:{}", port)); + let (message_sender, message_receiver) = channel::(); + + let client_join_handle = client.watch("Request".to_string(), message_sender); + + let mut messages = Vec::::new(); + loop { + info!("Client waiting to receive answer.. 
"); + let message = message_receiver.recv().unwrap(); + info!("Received answer: {}", message); + let do_close = message.as_str() == END_MESSAGE; + messages.push(message); + + if do_close { + info!("Client closing connection"); + break + } + } + + info!("Joining client thread"); + client.close().unwrap(); + client_join_handle.join().unwrap(); + + info!("Joining server thread"); + server.shut_down().unwrap(); + server_join_handle.join().unwrap().unwrap(); + + assert_eq!(1, messages.len()); + assert_eq!(1, handler.messages_handled.read().unwrap().len()); + } + + #[test] + fn get_works_and_closes_connection() { + let _ = env_logger::builder().is_test(true).try_init(); + + let server_response = "response 1".to_string(); + let responses = VecDeque::from([server_response.clone()]); + + let port = 22335; + let (server, handler) = create_server(responses, port); + + let server_clone = server.clone(); + let server_join_handle = thread::spawn(move || server_clone.run()); + + // Wait until server is up. + thread::sleep(std::time::Duration::from_millis(50)); + + let client = DirectClient::new(format!("wss://localhost:{}", port)); + let received_response = client.get("Request").unwrap(); + + info!("Joining server thread"); + server.shut_down().unwrap(); + server_join_handle.join().unwrap().unwrap(); + + assert_eq!(server_response, received_response); + assert_eq!(1, handler.messages_handled.read().unwrap().len()); + } +} diff --git a/core/rpc-client/src/mock.rs b/core/rpc-client/src/mock.rs index 9b2579d615..89b7a98e01 100644 --- a/core/rpc-client/src/mock.rs +++ b/core/rpc-client/src/mock.rs @@ -94,4 +94,8 @@ impl DirectApi for DirectClientMock { }; RuntimeMetadataPrefixed::decode(&mut metadata.as_slice()).map_err(|e| e.into()) } + + fn close(&self) -> Result<()> { + unimplemented!() + } } diff --git a/core/rpc-client/src/ws_client.rs b/core/rpc-client/src/ws_client.rs index 0cfd984465..8c47a20087 100644 --- a/core/rpc-client/src/ws_client.rs +++ b/core/rpc-client/src/ws_client.rs @@ -1,66 +1,131 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::error::Result as RpcClientResult; ///! Websocket client implementation to access the direct-rpc-server running inside an enclave. /// /// This should be replaced with the `jsonrpsee::WsClient`as soon as available in no-std: /// https://github.com/paritytech/jsonrpsee/issues/1 use log::*; use openssl::ssl::{SslConnector, SslMethod, SslStream, SslVerifyMode}; -use std::sync::mpsc::Sender as MpscSender; -use url; +use parking_lot::Mutex; +use std::sync::{mpsc::Sender as MpscSender, Arc}; +use url::{self}; use ws::{connect, util::TcpStream, CloseCode, Handler, Handshake, Message, Result, Sender}; +/// Control a registered web-socket client. 
+#[derive(Default)] +pub struct WsClientControl { + subscriber: Mutex>, +} + +impl Clone for WsClientControl { + fn clone(&self) -> Self { + WsClientControl { subscriber: Mutex::new(self.subscriber.lock().clone()) } + } +} + +impl WsClientControl { + pub fn close_connection(&self) -> RpcClientResult<()> { + if let Some(s) = self.subscriber.lock().as_ref() { + debug!("Closing connection"); + s.close(CloseCode::Normal)?; + debug!("Connection is closed"); + } + Ok(()) + } + + fn subscribe_sender(&self, sender: Sender) -> RpcClientResult<()> { + let mut subscriber_lock = self.subscriber.lock(); + *subscriber_lock = Some(sender); + Ok(()) + } +} + #[derive(Clone)] pub struct WsClient { - pub out: Sender, - pub request: String, - pub result: MpscSender, - pub do_watch: bool, + web_socket: Sender, + request: String, + result: MpscSender, + do_watch: bool, } impl WsClient { - pub fn new( - out: Sender, - request: String, - result: MpscSender, - do_watch: bool, - ) -> WsClient { - WsClient { out, request, result, do_watch } - } - - pub fn connect( + /// Connect a web-socket client for multiple request/responses. + /// + /// Control over the connection is done using the provided client control. + /// (e.g. shutdown has to be initiated explicitly). + pub fn connect_watch_with_control( url: &str, request: &str, result: &MpscSender, - do_watch: bool, + control: Arc, ) -> Result<()> { connect(url.to_string(), |out| { - WsClient::new(out, request.to_string(), result.clone(), do_watch) + control.subscribe_sender(out.clone()).unwrap(); + WsClient::new(out, request.to_string(), result.clone(), true) + }) + } + + /// Connects a web-socket client for a one-shot request. + pub fn connect_one_shot(url: &str, request: &str, result: &MpscSender) -> Result<()> { + connect(url.to_string(), |out| { + WsClient::new(out, request.to_string(), result.clone(), false) }) } + + fn new( + web_socket: Sender, + request: String, + result: MpscSender, + do_watch: bool, + ) -> WsClient { + WsClient { web_socket, request, result, do_watch } + } } impl Handler for WsClient { fn on_open(&mut self, _: Handshake) -> Result<()> { debug!("sending request: {:?}", self.request.clone()); - match self.out.send(self.request.clone()) { + match self.web_socket.send(self.request.clone()) { Ok(_) => Ok(()), Err(e) => Err(e), } } fn on_message(&mut self, msg: Message) -> Result<()> { - debug!("got message"); - debug!("{}", msg); - debug!("sending result to MpscSender.."); + trace!("got message"); + trace!("{}", msg); + trace!("sending result to MpscSender.."); self.result.send(msg.to_string()).unwrap(); if !self.do_watch { debug!("do_watch is false, closing connection"); - self.out.close(CloseCode::Normal).unwrap(); - debug!("connection is closed"); + self.web_socket.close(CloseCode::Normal).unwrap(); + debug!("Connection close requested"); } debug!("on_message successful, returning"); Ok(()) } + fn on_close(&mut self, _code: CloseCode, _reason: &str) { + debug!("Web-socket close"); + self.web_socket.shutdown().unwrap() + } + /// we are overriding the `upgrade_ssl_client` method in order to disable hostname verification /// this is taken from https://github.com/housleyjk/ws-rs/blob/master/examples/unsafe-ssl-client.rs /// TODO: hostname verification should probably be enabled again for production? 
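Aside on the reworked `ws_client` above (not part of the patch): a minimal sketch of how a caller is expected to drive the new persistent-connection API, mirroring `DirectClient::watch()` and the `watch_works_and_closes_connection_on_demand` test introduced in this PR. It assumes `WsClient` and `WsClientControl` from `core/rpc-client/src/ws_client.rs` are in scope; the URL, request payload and stop condition are illustrative assumptions only.

use std::{
    sync::{mpsc::channel, Arc},
    thread,
};

fn watch_until_done() {
    // Shared control handle; it captures the ws::Sender once the connection is up.
    let control = Arc::new(WsClientControl::default());
    let (sender, receiver) = channel::<String>();

    // Illustrative values only.
    let url = "wss://127.0.0.1:2000".to_string();
    let request = r#"{"jsonrpc": "2.0", "method": "author_getShieldingKey", "params": [], "id": 1}"#.to_string();

    let control_for_client = control.clone();
    let join_handle = thread::spawn(move || {
        // Keeps the connection open for multiple responses until closed explicitly.
        WsClient::connect_watch_with_control(&url, &request, &sender, control_for_client)
            .expect("web-socket watch connection failed")
    });

    // Consume updates until the caller decides it has seen enough (assumed condition).
    while let Ok(update) = receiver.recv() {
        println!("received update: {}", update);
        if update.contains("Finalized") {
            break
        }
    }

    // With the persistent-connection model, shutdown is the caller's responsibility.
    control.close_connection().expect("failed to close web-socket connection");
    join_handle.join().expect("ws client thread panicked");
}

Keeping the `ws::Sender` behind `WsClientControl` is what allows `DirectClient::close()` to terminate a watch connection from outside the client thread; with the previous `do_watch` flag the connection could only close itself after a one-shot response.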
diff --git a/core/rpc-server/Cargo.toml b/core/rpc-server/Cargo.toml index 7edd6c36ff..b826f09296 100644 --- a/core/rpc-server/Cargo.toml +++ b/core/rpc-server/Cargo.toml @@ -11,7 +11,7 @@ log = "0.4.14" jsonrpsee = { version = "0.2.0-alpha.7", features = ["full"] } serde_json = "1.0.64" tokio = { version = "1.6.1", features = ["full"] } -parity-scale-codec = "2.1.3" +parity-scale-codec = "3.0.0" # local itp-enclave-api = { path = "../../core-primitives/enclave-api" } @@ -26,5 +26,5 @@ std = [] [dev-dependencies] env_logger = { version = "*" } -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } its-test = { path = "../../sidechain/test" } diff --git a/core/tls-websocket-server/Cargo.toml b/core/tls-websocket-server/Cargo.toml index d50282604f..99d8e96bcd 100644 --- a/core/tls-websocket-server/Cargo.toml +++ b/core/tls-websocket-server/Cargo.toml @@ -12,6 +12,7 @@ default = ["std"] sgx = [ "sgx_tstd", "sgx_types", + "mio-extras/sgx", "mio_sgx", "rustls_sgx", "webpki_sgx", @@ -20,30 +21,38 @@ sgx = [ ] std = [ "mio", + "mio-extras/std", "rustls", "webpki", "tungstenite", "thiserror", ] +mocks = [] [dependencies] # sgx dependencies -sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } -sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true, features = ["net", "thread"] } +sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } +sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true, features = ["net", "thread"] } # sgx enabled external libraries -mio_sgx = { package = "mio", git = "https://github.com/mesalock-linux/mio-sgx", tag = "sgx_1.1.3", optional = true } -rustls_sgx = { package = "rustls", git = "https://github.com/mesalock-linux/rustls", branch = "mesalock_sgx", optional = true } -webpki_sgx = { package = "webpki", git = "https://github.com/mesalock-linux/webpki", branch = "mesalock_sgx", optional = true } -tungstenite_sgx = { package = "tungstenite", git = "https://github.com/integritee-network/tungstenite-rs-sgx", branch = "sgx-experimental", optional = true, features = ["rustls-tls-webpki-roots"] } -thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linux/thiserror-sgx", tag = "sgx_1.1.3", optional = true } +mio_sgx = { package = "mio", git = "https://github.com/mesalock-linux/mio-sgx", tag = "sgx_1.1.3", optional = true } +mio-extras = { git = "https://github.com/integritee-network/mio-extras-sgx", rev = "963234b", default-features = false, optional = true } +rustls_sgx = { package = "rustls", git = "https://github.com/mesalock-linux/rustls", branch = "mesalock_sgx", optional = true } +webpki_sgx = { package = "webpki", git = "https://github.com/mesalock-linux/webpki", branch = "mesalock_sgx", optional = true } +tungstenite_sgx = { package = "tungstenite", git = "https://github.com/integritee-network/tungstenite-rs-sgx", branch = "sgx-experimental", optional = true, features = ["rustls-tls-webpki-roots"] } +thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linux/thiserror-sgx", tag = "sgx_1.1.3", optional = true } # std compatible external libraries (make sure these versions match with the sgx-enabled ones above) -mio = { version = "0.6", optional = true } -rustls = 
{ version = "0.19", optional = true } -webpki = { version = "0.21", optional = true } -tungstenite = { version = "0.14.0", optional = true } -thiserror = { version = "1.0", optional = true } +mio = { version = "0.6.14", optional = true } +rustls = { version = "0.19", optional = true } +webpki = { version = "0.21", optional = true } +tungstenite = { version = "0.15.0", optional = true, features = ["rustls-tls-webpki-roots"] } +thiserror = { version = "1.0", optional = true } # no-std compatible libraries log = { version = "0.4", default-features = false } + +[dev-dependencies] +env_logger = "0.9.0" +rustls = { version = "0.19", features = ["dangerous_configuration"] } +url = { version = "2.0.0" } diff --git a/core/tls-websocket-server/src/config_provider.rs b/core/tls-websocket-server/src/config_provider.rs new file mode 100644 index 0000000000..394c294fd2 --- /dev/null +++ b/core/tls-websocket-server/src/config_provider.rs @@ -0,0 +1,45 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(all(not(feature = "std"), feature = "sgx"))] +use crate::sgx_reexport_prelude::*; + +use crate::{error::WebSocketResult, tls_common::make_config}; +use rustls::ServerConfig; +use std::{string::String, sync::Arc}; + +/// Trait to provide a Rustls server config. +pub trait ProvideServerConfig: Send + Sync { + fn get_config(&self) -> WebSocketResult>; +} + +pub struct FromFileConfigProvider { + private_key_path: String, + certificates_path: String, +} + +impl FromFileConfigProvider { + pub fn new(private_key_path: String, certificates_path: String) -> Self { + Self { private_key_path, certificates_path } + } +} + +impl ProvideServerConfig for FromFileConfigProvider { + fn get_config(&self) -> WebSocketResult> { + make_config(self.certificates_path.as_str(), self.private_key_path.as_str()) + } +} diff --git a/core/tls-websocket-server/src/connection.rs b/core/tls-websocket-server/src/connection.rs index 7e2f122e65..a21e16fcf1 100644 --- a/core/tls-websocket-server/src/connection.rs +++ b/core/tls-websocket-server/src/connection.rs @@ -18,96 +18,284 @@ #[cfg(all(not(feature = "std"), feature = "sgx"))] use crate::sgx_reexport_prelude::*; -use crate::{WebSocketConnection, WebSocketError, WebSocketResult}; +use crate::{ + error::WebSocketError, stream_state::StreamState, WebSocketConnection, WebSocketMessageHandler, + WebSocketResult, +}; use log::*; -use rustls::ServerSession; +use mio::{event::Event, net::TcpStream, Poll, Ready, Token}; +use rustls::{ServerSession, Session}; use std::{ format, - net::TcpStream, string::{String, ToString}, + sync::Arc, }; -use tungstenite::{accept, Message, WebSocket}; - -type RustlsStream = rustls::StreamOwned; -type RustlsWebSocket = WebSocket; +use tungstenite::Message; -pub struct TungsteniteWsConnection { - web_socket: RustlsWebSocket, +/// A web-socket connection object. 
+pub struct TungsteniteWsConnection { + stream_state: StreamState, + connection_token: Token, + connection_handler: Arc, + is_closed: bool, } -impl TungsteniteWsConnection { - pub fn connect( +impl TungsteniteWsConnection +where + Handler: WebSocketMessageHandler, +{ + pub fn new( tcp_stream: TcpStream, server_session: ServerSession, - ) -> WebSocketResult { - let tls_stream = rustls::StreamOwned::new(server_session, tcp_stream); - let web_socket = accept(tls_stream).map_err(|_| WebSocketError::HandShakeError)?; + connection_token: Token, + handler: Arc, + ) -> WebSocketResult { + Ok(TungsteniteWsConnection { + stream_state: StreamState::from_stream(rustls::StreamOwned::new( + server_session, + tcp_stream, + )), + connection_token, + connection_handler: handler, + is_closed: false, + }) + } + + fn do_tls_read(&mut self) -> ConnectionState { + let tls_stream = match self.stream_state.internal_stream_mut() { + None => return ConnectionState::Closing, + Some(s) => s, + }; + + let tls_session = &mut tls_stream.sess; + + match tls_session.read_tls(&mut tls_stream.sock) { + Ok(r) => + if r == 0 { + return ConnectionState::Closing + }, + Err(err) => { + if let std::io::ErrorKind::WouldBlock = err.kind() { + debug!("TLS session is blocked"); + return ConnectionState::Blocked + } + warn!("I/O error after reading TLS data: {:?}", err); + }, + } - Ok(TungsteniteWsConnection { web_socket }) + match tls_session.process_new_packets() { + Ok(_) => { + if tls_session.is_handshaking() { + return ConnectionState::TlsHandshake + } + ConnectionState::Alive + }, + Err(e) => { + error!("cannot process TLS packet(s), closing connection: {:?}", e); + ConnectionState::Closing + }, + } } - fn read_next_message(&mut self) -> WebSocketResult { - // loop until we have a Message::Text - loop { - let message = - self.web_socket.read_message().map_err(|_| WebSocketError::ConnectionClosed)?; - if let Message::Text(s) = message { - return Ok(s) - } + fn do_tls_write(&mut self) -> ConnectionState { + let tls_stream = match self.stream_state.internal_stream_mut() { + None => return ConnectionState::Closing, + Some(s) => s, + }; + + match tls_stream.sess.write_tls(&mut tls_stream.sock) { + Ok(_) => { + trace!("TLS write successful, connection is alive"); + if tls_stream.sess.is_handshaking() { + return ConnectionState::TlsHandshake + } + ConnectionState::Alive + }, + Err(e) => { + error!("TLS write error: {:?}", e); + ConnectionState::Closing + }, } } - fn write_message(&mut self, message: &str) -> WebSocketResult<()> { - if !self.web_socket.can_write() { - return Err(WebSocketError::ConnectionClosed) + /// Read from a web-socket, or initiate handshake if websocket is not initialized yet. + /// + /// Returns a boolean 'connection should be closed'. 
+ fn read_or_initialize_websocket(&mut self) -> WebSocketResult { + if let StreamState::EstablishedWebsocket(web_socket) = &mut self.stream_state { + match web_socket.read_message() { + Ok(m) => + if let Err(e) = self.handle_message(m) { + error!("Failed to handle web-socket message: {:?}", e); + }, + Err(e) => match e { + tungstenite::Error::ConnectionClosed => return Ok(true), + tungstenite::Error::AlreadyClosed => return Ok(true), + _ => error!("Failed to read message from web-socket: {:?}", e), + }, + } + } else { + self.stream_state = std::mem::take(&mut self.stream_state).attempt_handshake(); + if self.stream_state.is_invalid() { + warn!("Web-socket connection ({:?}) failed, closing", self.connection_token); + return Ok(true) + } } - self.web_socket - .write_message(Message::Text(message.to_string())) - .map_err(|e| WebSocketError::SocketWriteError(format!("{:?}", e))) + Ok(false) } -} -impl WebSocketConnection for TungsteniteWsConnection { - fn process_request(&mut self, initial_call: F) -> WebSocketResult - where - F: Fn(&str) -> String, - { - debug!("processing web socket request"); + fn handle_message(&mut self, message: Message) -> WebSocketResult<()> { + match message { + Message::Text(string_message) => { + trace!("Got Message::Text on web-socket, calling handler.."); + if let Some(reply) = self + .connection_handler + .handle_message(self.connection_token.into(), string_message)? + { + trace!("Handling message yielded a reply, sending it now.."); + self.write_message(reply)?; + trace!("Reply sent successfully"); + } + }, + Message::Binary(_) => { + warn!("received binary message, don't have a handler for this format"); + }, + Message::Close(_) => { + debug!("Received close frame, driving web-socket connection to close"); + if let StreamState::EstablishedWebsocket(web_socket) = &mut self.stream_state { + // We need to call write_pending until it returns an error that connection is closed. 
+ loop { + if let Err(e) = web_socket.write_pending() { + match e { + tungstenite::Error::ConnectionClosed + | tungstenite::Error::AlreadyClosed => break, + _ => {}, + } + } + } + } + }, + _ => {}, + } + Ok(()) + } - let request = self.read_next_message()?; + pub(crate) fn write_message(&mut self, message: String) -> WebSocketResult<()> { + match &mut self.stream_state { + StreamState::EstablishedWebsocket(web_socket) => { + if !web_socket.can_write() { + return Err(WebSocketError::ConnectionClosed) + } - let response = (initial_call)(request.as_str()); + web_socket + .write_message(Message::Text(message)) + .map_err(|e| WebSocketError::SocketWriteError(format!("{:?}", e))) + }, + _ => + Err(WebSocketError::SocketWriteError("No active web-socket available".to_string())), + } + } +} - self.write_message(response.as_str())?; +impl WebSocketConnection for TungsteniteWsConnection +where + Handler: WebSocketMessageHandler, +{ + type Socket = TcpStream; - debug!("successfully processed web socket request"); - Ok(response) + fn socket(&self) -> Option<&Self::Socket> { + self.stream_state.internal_stream().map(|s| &s.sock) } - fn send_update(&mut self, message: &str) -> WebSocketResult<()> { - debug!("sending web socket update"); - self.write_message(message) - } + fn get_session_readiness(&self) -> Ready { + match self.stream_state.internal_stream() { + None => mio::Ready::empty(), + Some(s) => { + let wants_read = s.sess.wants_read(); + let wants_write = s.sess.wants_write(); - fn close(&mut self) { - match self.web_socket.close(None) { - Ok(()) => { - debug!("web socket connection closed"); - }, - Err(e) => { - error!("failed to close web socket connection (already closed?): {:?}", e); + if wants_read && wants_write { + mio::Ready::readable() | mio::Ready::writable() + } else if wants_write { + mio::Ready::writable() + } else { + mio::Ready::readable() + } }, } + } - match self.web_socket.write_pending() { - Ok(()) => { - debug!("write_pending succeeded"); - }, - Err(e) => { - // a closed error is to be expected here (according to '.close()' documentation - debug!("flushed connection after closing, received error information: {:?}", e); - }, + fn on_ready(&mut self, poll: &mut Poll, event: &Event) -> WebSocketResult<()> { + let mut is_closing = false; + + if event.readiness().is_readable() { + trace!("Connection ({:?}) is readable", self.token()); + + let connection_state = self.do_tls_read(); + + if connection_state.is_alive() { + is_closing = self.read_or_initialize_websocket()?; + } else { + is_closing = connection_state.is_closing(); + } } + + if event.readiness().is_writable() { + trace!("Connection ({:?}) is writable", self.token()); + + let connection_state = self.do_tls_write(); + + if connection_state.is_alive() { + if let StreamState::EstablishedWebsocket(web_socket) = &mut self.stream_state { + trace!("Web-socket, write pending messages"); + if let Err(e) = web_socket.write_pending() { + match e { + tungstenite::Error::ConnectionClosed + | tungstenite::Error::AlreadyClosed => is_closing = true, + _ => error!("Failed to write pending web-socket messages: {:?}", e), + } + } + } + } else { + is_closing = connection_state.is_closing(); + } + } + + if is_closing { + debug!("Connection ({:?}) is closed", self.token()); + self.is_closed = true; + } else { + // Re-register with the poll. + self.reregister(poll)?; + } + Ok(()) + } + + fn is_closed(&self) -> bool { + self.is_closed + } + + fn token(&self) -> Token { + self.connection_token + } +} + +/// Internal connection state. 
+#[derive(Debug, Clone)] +enum ConnectionState { + Closing, + Blocked, + Alive, + TlsHandshake, +} + +impl ConnectionState { + pub(crate) fn is_alive(&self) -> bool { + matches!(self, ConnectionState::Alive) + } + + pub(crate) fn is_closing(&self) -> bool { + matches!(self, ConnectionState::Closing) } } diff --git a/core/tls-websocket-server/src/connection_id_generator.rs b/core/tls-websocket-server/src/connection_id_generator.rs new file mode 100644 index 0000000000..dac5431cb6 --- /dev/null +++ b/core/tls-websocket-server/src/connection_id_generator.rs @@ -0,0 +1,76 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(feature = "sgx")] +use std::sync::SgxRwLock as RwLock; + +#[cfg(feature = "std")] +use std::sync::RwLock; + +use crate::{error::WebSocketError, WebSocketResult}; + +pub type ConnectionId = usize; + +/// Trait to generate IDs (nonce) for websocket connections. +pub trait GenerateConnectionId { + fn next_id(&self) -> WebSocketResult; +} + +pub struct ConnectionIdGenerator { + current_id: RwLock, +} + +const MIN_ID: usize = 10; + +impl Default for ConnectionIdGenerator { + fn default() -> Self { + Self { current_id: RwLock::new(MIN_ID) } + } +} + +impl GenerateConnectionId for ConnectionIdGenerator { + fn next_id(&self) -> WebSocketResult { + let mut id_lock = self.current_id.write().map_err(|_| WebSocketError::LockPoisoning)?; + *id_lock = id_lock.checked_add(1).unwrap_or(MIN_ID); + Ok(*id_lock) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ws_server::{NEW_CONNECTIONS_LISTENER, SERVER_SIGNAL_TOKEN}; + + #[test] + fn next_id_works() { + let id_generator = ConnectionIdGenerator::default(); + + assert_eq!(11, id_generator.next_id().unwrap()); + assert_eq!(12, id_generator.next_id().unwrap()); + assert_eq!(13, id_generator.next_id().unwrap()); + } + + #[test] + fn next_id_is_greater_than_default_tokens() { + let id_generator = ConnectionIdGenerator::default(); + + let first_id = id_generator.next_id().unwrap(); + + assert!(NEW_CONNECTIONS_LISTENER < mio::Token(first_id)); + assert!(SERVER_SIGNAL_TOKEN < mio::Token(first_id)); + } +} diff --git a/core/tls-websocket-server/src/error.rs b/core/tls-websocket-server/src/error.rs new file mode 100644 index 0000000000..3d86b509dc --- /dev/null +++ b/core/tls-websocket-server/src/error.rs @@ -0,0 +1,55 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +*/ + +#[cfg(all(not(feature = "std"), feature = "sgx"))] +use crate::sgx_reexport_prelude::*; + +use crate::ConnectionId; +use std::{boxed::Box, io::Error as IoError, net::AddrParseError, string::String}; + +pub type WebSocketResult = Result; + +/// General web-socket error type +#[derive(Debug, thiserror::Error)] +pub enum WebSocketError { + #[error("Invalid certificate: {0}")] + InvalidCertificate(String), + #[error("Invalid private key: {0}")] + InvalidPrivateKey(String), + #[error("Invalid web-socket address: {0}")] + InvalidWsAddress(AddrParseError), + #[error("TCP bind: {0}")] + TcpBindError(IoError), + #[error("Web-socket hand shake: {0}")] + HandShakeError(String), + #[error("{0} is not a valid and active web-socket connection id")] + InvalidConnection(ConnectionId), + #[error("Web-socket connection already closed error")] + ConnectionClosed, + #[error("Web-socket connection has not yet been established")] + ConnectionNotYetEstablished, + #[error("Web-socket write: {0}")] + SocketWriteError(String), + #[error("Lock poisoning")] + LockPoisoning, + #[error("Failed to receive server signal message: {0}")] + MioReceiveError(#[from] std::sync::mpsc::TryRecvError), + #[error("{0}")] + IoError(#[from] std::io::Error), + #[error("{0}")] + Other(Box), +} diff --git a/core/tls-websocket-server/src/lib.rs b/core/tls-websocket-server/src/lib.rs index 773533b130..00e51d38c3 100644 --- a/core/tls-websocket-server/src/lib.rs +++ b/core/tls-websocket-server/src/lib.rs @@ -20,6 +20,7 @@ #[cfg(all(feature = "std", feature = "sgx"))] compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the same time"); +extern crate alloc; #[cfg(all(not(feature = "std"), feature = "sgx"))] extern crate sgx_tstd as std; @@ -36,87 +37,136 @@ pub mod sgx_reexport_prelude { #[cfg(all(not(feature = "std"), feature = "sgx"))] use crate::sgx_reexport_prelude::*; -extern crate alloc; - -use crate::{connection::TungsteniteWsConnection, ws_server::TungsteniteWsServer}; -use alloc::boxed::Box; -use log::*; +use crate::{ + config_provider::FromFileConfigProvider, + connection_id_generator::{ConnectionId, ConnectionIdGenerator}, + error::{WebSocketError, WebSocketResult}, + ws_server::TungsteniteWsServer, +}; +use mio::{event::Evented, Token}; use std::{ - io::Error as IoError, - net::AddrParseError, + fmt::Debug, string::{String, ToString}, sync::Arc, }; -mod common; -pub mod connection; -mod ws_server; - -/// General web-socket error type -#[derive(Debug, thiserror::Error)] -pub enum WebSocketError { - #[error("Invalid certificate error: {0}")] - InvalidCertificate(String), - #[error("Invalid private key error: {0}")] - InvalidPrivateKey(String), - #[error("Invalid web-socket address error: {0}")] - InvalidWsAddress(AddrParseError), - #[error("TCP bind error: {0}")] - TcpBindError(IoError), - #[error("Web-socket hand shake error")] - HandShakeError, - #[error("Web-socket connection already closed error")] - ConnectionClosed, - #[error("Web-socket connection has not yet been established")] - ConnectionNotYetEstablished, - #[error("Web-socket write error: {0}")] - SocketWriteError(String), - #[error("Web-socket handler error: {0}")] - HandlerError(Box), -} +pub mod config_provider; +mod connection; +pub mod connection_id_generator; +pub mod error; +mod stream_state; +mod tls_common; +pub mod ws_server; -pub type WebSocketResult = Result; +#[cfg(any(test, feature = "mocks"))] +pub mod test; -/// abstraction of a web socket connection -pub trait WebSocketConnection: Send + Sync { - fn process_request(&mut 
self, initial_call: F) -> WebSocketResult - where - F: Fn(&str) -> String; +/// Connection token alias. +#[derive(Eq, PartialEq, Clone, Copy, Debug)] +pub struct ConnectionToken(pub usize); - fn send_update(&mut self, message: &str) -> WebSocketResult<()>; +impl From for Token { + fn from(c: ConnectionToken) -> Self { + Token(c.0) + } +} - fn close(&mut self); +impl From for ConnectionToken { + fn from(t: Token) -> Self { + ConnectionToken(t.0) + } } -/// Handles a web-socket connection -pub trait WebSocketHandler { - type Connection: WebSocketConnection; +/// Handles a web-socket connection message. +pub trait WebSocketMessageHandler: Send + Sync { + fn handle_message( + &self, + connection_token: ConnectionToken, + message: String, + ) -> WebSocketResult>; +} - fn handle(&self, connection: Self::Connection) -> WebSocketResult<()>; +/// Allows to send response messages to a specific connection. +pub trait WebSocketResponder: Send + Sync { + fn send_message( + &self, + connection_token: ConnectionToken, + message: String, + ) -> WebSocketResult<()>; } -/// Run a web-socket server with a given handler +/// Run a web-socket server with a given handler. pub trait WebSocketServer { type Connection; - fn run(&self, handler: Arc) -> WebSocketResult<()> - where - Handler: WebSocketHandler; + fn run(&self) -> WebSocketResult<()>; + + fn shut_down(&self) -> WebSocketResult<()>; +} + +/// Abstraction of a web socket connection using mio. +pub(crate) trait WebSocketConnection: Send + Sync { + /// Socket type, typically a TCP stream. + type Socket: Evented; + + /// Get the underlying socket (TCP stream) + fn socket(&self) -> Option<&Self::Socket>; + + /// Query the underlying session for readiness (read/write). + fn get_session_readiness(&self) -> mio::Ready; + + /// Handles the ready event, the connection has work to do. + fn on_ready(&mut self, poll: &mut mio::Poll, ev: &mio::event::Event) -> WebSocketResult<()>; + + /// True if connection was closed. + fn is_closed(&self) -> bool; + + /// Return the connection token (= ID) + fn token(&self) -> mio::Token; + + /// Register the connection with the mio poll. + fn register(&mut self, poll: &mio::Poll) -> WebSocketResult<()> { + match self.socket() { + Some(s) => { + poll.register( + s, + self.token(), + self.get_session_readiness(), + mio::PollOpt::level() | mio::PollOpt::oneshot(), + )?; + Ok(()) + }, + None => Err(WebSocketError::ConnectionClosed), + } + } + + /// Re-register the connection with the mio poll, after handling an event. 
+ fn reregister(&mut self, poll: &mio::Poll) -> WebSocketResult<()> { + match self.socket() { + Some(s) => { + poll.reregister( + s, + self.token(), + self.get_session_readiness(), + mio::PollOpt::level() | mio::PollOpt::oneshot(), + )?; + + Ok(()) + }, + None => Err(WebSocketError::ConnectionClosed), + } + } } -pub fn run_ws_server(addr_plain: &str, handler: Arc) +pub fn create_ws_server( + addr_plain: &str, + handler: Arc, +) -> Arc> where - Handler: WebSocketHandler, + Handler: WebSocketMessageHandler, { - let cert = "end.fullchain".to_string(); - let key = "end.rsa".to_string(); + let config_provider = + Arc::new(FromFileConfigProvider::new("end.rsa".to_string(), "end.fullchain".to_string())); - let web_socket_server = TungsteniteWsServer::new(addr_plain.to_string(), cert, key); - - match web_socket_server.run(handler) { - Ok(()) => {}, - Err(e) => { - error!("Web socket server encountered an unexpected error: {:?}", e) - }, - } + Arc::new(TungsteniteWsServer::new(addr_plain.to_string(), config_provider, handler)) } diff --git a/core/tls-websocket-server/src/stream_state.rs b/core/tls-websocket-server/src/stream_state.rs new file mode 100644 index 0000000000..efcebdff59 --- /dev/null +++ b/core/tls-websocket-server/src/stream_state.rs @@ -0,0 +1,104 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(all(not(feature = "std"), feature = "sgx"))] +use crate::sgx_reexport_prelude::*; + +use log::*; +use mio::net::TcpStream; +use rustls::ServerSession; +use tungstenite::{ + accept, + handshake::{server::NoCallback, MidHandshake}, + HandshakeError, ServerHandshake, WebSocket, +}; + +pub(crate) type RustlsStream = rustls::StreamOwned; +pub(crate) type RustlsServerHandshake = ServerHandshake; +pub(crate) type RustlsMidHandshake = MidHandshake; +pub(crate) type RustlsWebSocket = WebSocket; + +/// Internal TLS stream state. From pure TLS stream, to web-socket handshake and established WS. 
+pub(crate) enum StreamState { + Invalid, + TlsStream(RustlsStream), + WebSocketHandshake(RustlsMidHandshake), + EstablishedWebsocket(RustlsWebSocket), +} + +impl Default for StreamState { + fn default() -> Self { + Self::Invalid + } +} + +impl StreamState { + pub(crate) fn from_stream(stream: RustlsStream) -> Self { + StreamState::TlsStream(stream) + } + + pub(crate) fn is_invalid(&self) -> bool { + matches!(self, StreamState::Invalid) + } + + pub(crate) fn internal_stream(&self) -> Option<&RustlsStream> { + match self { + StreamState::TlsStream(s) => Some(s), + StreamState::WebSocketHandshake(h) => Some(h.get_ref().get_ref()), + StreamState::EstablishedWebsocket(ws) => Some(ws.get_ref()), + StreamState::Invalid => None, + } + } + + pub(crate) fn internal_stream_mut(&mut self) -> Option<&mut RustlsStream> { + match self { + StreamState::TlsStream(s) => Some(s), + StreamState::WebSocketHandshake(h) => Some(h.get_mut().get_mut()), + StreamState::EstablishedWebsocket(ws) => Some(ws.get_mut()), + StreamState::Invalid => None, + } + } + + pub(crate) fn attempt_handshake(self) -> Self { + match self { + // We have the bare TLS stream only, attempt to do a web-socket handshake. + StreamState::TlsStream(tls_stream) => Self::from_handshake_result(accept(tls_stream)), + // We already have an on-going handshake, attempt another try. + StreamState::WebSocketHandshake(hs) => Self::from_handshake_result(hs.handshake()), + _ => self, + } + } + + fn from_handshake_result( + handshake_result: Result>, + ) -> Self { + match handshake_result { + Ok(ws) => Self::EstablishedWebsocket(ws), + Err(e) => match e { + // I/O would block our handshake attempt. Need to re-try. + HandshakeError::Interrupted(mhs) => { + warn!("Web-socket handshake interrupted"); + Self::WebSocketHandshake(mhs) + }, + HandshakeError::Failure(e) => { + error!("Web-socket handshake failed: {:?}", e); + Self::Invalid + }, + }, + } + } +} diff --git a/core/tls-websocket-server/src/test/fixtures/mod.rs b/core/tls-websocket-server/src/test/fixtures/mod.rs new file mode 100644 index 0000000000..6790e464c8 --- /dev/null +++ b/core/tls-websocket-server/src/test/fixtures/mod.rs @@ -0,0 +1,22 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +pub mod no_cert_verifier; +pub mod test_cert; +pub mod test_private_key; +pub mod test_server; +pub mod test_server_config_provider; diff --git a/core/tls-websocket-server/src/test/fixtures/no_cert_verifier.rs b/core/tls-websocket-server/src/test/fixtures/no_cert_verifier.rs new file mode 100644 index 0000000000..50e05527ab --- /dev/null +++ b/core/tls-websocket-server/src/test/fixtures/no_cert_verifier.rs @@ -0,0 +1,51 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use log::debug; +use rustls::{Certificate, ClientCertVerified, DistinguishedNames, TLSError}; +use webpki::DNSName; + +/// Test Rustls verifier, disables ALL verification (do NOT use in production!) +pub struct NoCertVerifier {} + +impl rustls::ServerCertVerifier for NoCertVerifier { + fn verify_server_cert( + &self, + _: &rustls::RootCertStore, + _: &[rustls::Certificate], + _: webpki::DNSNameRef<'_>, + _: &[u8], + ) -> Result { + debug!("Certificate verification bypassed"); + Ok(rustls::ServerCertVerified::assertion()) + } +} + +impl rustls::ClientCertVerifier for NoCertVerifier { + fn client_auth_root_subjects(&self, _sni: Option<&DNSName>) -> Option { + None + } + + fn verify_client_cert( + &self, + _presented_certs: &[Certificate], + _sni: Option<&DNSName>, + ) -> Result { + debug!("Certificate verification bypassed"); + Ok(rustls::ClientCertVerified::assertion()) + } +} diff --git a/core/tls-websocket-server/src/test/fixtures/test_cert.rs b/core/tls-websocket-server/src/test/fixtures/test_cert.rs new file mode 100644 index 0000000000..1b94e7a24a --- /dev/null +++ b/core/tls-websocket-server/src/test/fixtures/test_cert.rs @@ -0,0 +1,139 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +*/ + +use rustls::{internal::pemfile::certs, Certificate}; +use std::{io::BufReader, vec::Vec}; + +pub fn get_test_certificate_chain() -> Vec { + let mut buf_reader = BufReader::new(CERT_STR.as_bytes()); + certs(&mut buf_reader).unwrap() +} + +const CERT_STR: &str = "\ +-----BEGIN CERTIFICATE----- +MIIEADCCAmigAwIBAgICAcgwDQYJKoZIhvcNAQELBQAwLDEqMCgGA1UEAwwhcG9u +eXRvd24gUlNBIGxldmVsIDIgaW50ZXJtZWRpYXRlMB4XDTE3MDQxMDIwNTYyN1oX +DTIyMTAwMTIwNTYyN1owGTEXMBUGA1UEAwwOdGVzdHNlcnZlci5jb20wggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCa4nonCxArES+kBBf9mZoaQ2GBMg74 +Pj2ve4RKJSIBt9A7EgJ4hFznFQ11O11Xvb3dVQGOK+pFRxh2xg0DJvV3lJytpvKe +mviyT5KSGvp6Hybqmx66B2V3iDfrXhhySqG5tKEeczFBIq+62dAp0+r0oSdpZKGT +1YDtXonjcbnDb93K7g8arEadFKYN3MAjBGQ3m5fsWJJuq4hLU1+dpmAfxmYH1dlc +n89LyPhYh0I7R5v17VrGlNCWIWD1emLtM8vTS94eMtp8R6MuMIZTOKgBTrIpU4G5 +GPcR3flDzzLsCxEttjjMa41zStKXzieUIwirRAzPv48V4JlkCCUPv97pAgMBAAGj +gb4wgbswDAYDVR0TAQH/BAIwADALBgNVHQ8EBAMCBsAwHQYDVR0OBBYEFNn77YZg +4AGguHBKVggK00dtRvhCMEIGA1UdIwQ7MDmAFGuwcG2Zfyr92yAiXU9HP9rBYC6/ +oR6kHDAaMRgwFgYDVQQDDA9wb255dG93biBSU0EgQ0GCAXswOwYDVR0RBDQwMoIO +dGVzdHNlcnZlci5jb22CFXNlY29uZC50ZXN0c2VydmVyLmNvbYIJbG9jYWxob3N0 +MA0GCSqGSIb3DQEBCwUAA4IBgQB4xB9IPNxkJIA8QtngQZCCSPH5SjfAibcLfwi2 +NLHe4hO4HvoIVv0ru7CODfq45qNfH7sUj8a/JBU8BwcJ3xPewWFdavtCP8+dapmd +pr831+Xx6p9tNIdW16WrCXEV8i9bHy43Y4pWbNdXQy5meI0qvSM/ExedZqqVeJJT +oXL/aCtMsBixlwlKvrsG9ZvIAl1ics0wA5kqQWVufe95loI+HUcPc9s9689H+/ON +lH8rTLPwyufk9h2dTb9Wzw3qewlDIqgoyX7k9cOwrJqA4D6typCvb5dWfQlK9c72 +4rGbqHSx7mrlaZ4typfAMdEbynRlDSgIIZGXb7RaoV3NT2XuVFd8+lcXgBiJMvPk +STejz77EPR2+uKvQ1gMJXpEHCBUvMMyDqhpcNzb0DaXgf4eYI9RqfxU1pkgYnfxe +DGDGI2SdmO43NwSDyEQVSlRpCIBj4ZDay3IP7mbdi8MLxR9H1BCHnN7D04UrTnuA +c/cl0RMWL+iHtKU2cCxltEQQ9qQ= +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIGnzCCAoegAwIBAgIBezANBgkqhkiG9w0BAQsFADAaMRgwFgYDVQQDDA9wb255 +dG93biBSU0EgQ0EwHhcNMTcwNDEwMjA1NjI3WhcNMjcwNDA4MjA1NjI3WjAsMSow +KAYDVQQDDCFwb255dG93biBSU0EgbGV2ZWwgMiBpbnRlcm1lZGlhdGUwggGiMA0G +CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDCX7V0gKGQBS64QKntjDlBslbQJaxq +EL8Yyq+qjF6nkOoqENKWSzeNyQ76kPVlzeV03UCaIgTF4+FeQrUr7wauEz0FGmDh +yx/B4xy9ZXdBIftPB8iz8Q/KrKO6YM6tkj7ijvL8hP3MfssBkA+VoAxamPSIikfM +9kyttemjYizgM0ywebzKmQGJbEINZ80Kp63ayR/Uo/cORjlH3xbmtTsL3pd+k6Ro +xOMZKm1RIwOwGgxDW4ea294A4lXHwfwHGMsP0/xmqTZ0R/EpxLKeqJAQffTiVsBK +YEFzANn3nol1IYrdcZcgcs16KTnc5+XyL87KSdIgDgG3wmQvRCdLX5G6GChyP03Z +qQSYMkwGSNgCD1v4m14Z5XT2su7iilHfjsucvT4OukCe63nqeXIZ+w63YqbjTp/a +HMgrXVg1wMlSncl0OIKcjLOgJ5vbPOGk9DvF93JbRFp/9sAZmK89Ur4gBmgpq2Zn +bknK0LVt+aerP7rf8CPYE89olPVUW0owwrkCAwEAAaNeMFwwHQYDVR0OBBYEFGuw +cG2Zfyr92yAiXU9HP9rBYC6/MCAGA1UdJQEB/wQWMBQGCCsGAQUFBwMBBggrBgEF +BQcDAjAMBgNVHRMEBTADAQH/MAsGA1UdDwQEAwIB/jANBgkqhkiG9w0BAQsFAAOC +BAEARD9wwIHsAFWlzrRLw3JcAUDB906Ks7KZzdqe3n6FkbyHHP1N5JG25wXwgADS +qux6sZU7h6+q209IEqXWbw+nbxJs/3D1hLN6X2tVgsfkSflGsd3DfNPZI8qKUyOD +VYlql/EPEMBixXOeVpwxXc48rX/yVjxqCvhY/A7eIiAc+bzQtwozLppChyVitQGI +MViXRdGdFiybwTKoJMYXl6ztamk9TWhdvJ9znirol12b06Z3J0Kz0c/kqY7VVZqL +ba76+IAJjvWQE7PYEOqpFHOLpilv9j5d/0kBR4AgJaooFwcYnr6aJKfNUgGWEmdn +ELYmfa0qORllAM/yGoewRfWGLZBNgT0QFYg2IFjnp0W0wIXFRd7xVqldN+cTmMqk +szpVV7bqGvuk6SQNFjIZ8VIVc/mXua4WlwBODDRzKqU3bIgBTODgVq1edwqp6UjN +ECLAOe1p03GGMr4WSPDoFjlQlHy+NLUwZg3RI+HsAkow9WfP7KqGN4vFDC4ru9Pg +2uD28oTrOgYQpzKjQJSH3kC5feOUdrsET7zic75XO1J33CAlgbIZ2TSQDqnH2cY5 +bQsWSNA2Lle3wBbeHlCy7ACiaoeJS23TJV9n8PcsRwSmHA9NgT4WSavXwtZ0lBhI +60GY80VXo9ziQjvVTMZNymZ4FEqCvULHGhFI08Jqd1jOXjnPLY4WEARqkicBJvI1 +3t4sBLDU+PEqH7m8k3lCZd6D7XVDcc8bJock+DjXZIMbZY79UMuzyHocXNJpRfRT 
+cqS0qneltFe6Pea7y0PN2IDttGBLb1CVQpXhRkpFU8jtyXh3ulSZSJEeqLVRFgdv +PVwHWAhLPewVGDkgTrlWVNfiXxp1LWVTFzQFas9xWiY4byQk/DNQaaFwHpGoZgVc +qAzUVk20Msm2u9xvSbPcBGk0dL4fdlnOkyeq/k/fnNrGdRHJWuJe7QR73/N0u6fy +7H76xUXvcwwrxL8ma8nV9K+A7oM7YUiR1wagD9cnoDDBgQmH9Izvfw0PxJgqnLOe +lQGPVGRhmXNtLLG57dqgjrvERGy9u5NMxBlkH0giZTFyQXPQ+N75ouM4S3RL75PM +UaTOBtnyCj++5ysnDFlGqEXgy08rrtkCbbNfd9dnO568juXS6ExC6TEL/pUMhy+Z +ooIJ69Tt7R5dOLaKRrkX/nKHfCfLfXXnjyDmdRHRYrXvTWusF038OsqY89tb0F0u +S4Szv4/Bl1bhzx/XYMZv/y7XL0va8FQLiRTuvqJ9hTsE/Xkd4ZFrP1LaP6HzVR1g +tsFs2Gc8j7H299U3WLjNon0TL2uPXa77Vu+9h7QCi1W9Uzsv0xMvZ/KMEnXyaEBd +W1lqo85ih1nnfxcW+lmAz8QNGQ== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIJCjCCBPKgAwIBAgIJAI+QZnVEkxq/MA0GCSqGSIb3DQEBCwUAMBoxGDAWBgNV +BAMMD3Bvbnl0b3duIFJTQSBDQTAeFw0xNzA0MTAyMDU2MjdaFw0yNzA0MDgyMDU2 +MjdaMBoxGDAWBgNVBAMMD3Bvbnl0b3duIFJTQSBDQTCCBCIwDQYJKoZIhvcNAQEB +BQADggQPADCCBAoCggQBAMD0iyFqRCNhvD5T9WXO8caNGb5ecrlnqSUvfcc+6Xh9 +sShtK6DX2DZ+6YT2WWOZTk0I9T+XG2kujjyfdCbEDMWcT9so7/gPeIG/qFlhONCu +HC+zntuZrGgMEYpF3Xc41CyF3saredTJEo1J64TPEke8mohezIGZYM1vTtRnqW+1 +RstSNTu8a/B0VaG0iA5P5RuSGVmxczi4EWJtuXFhcbgqICaUt0vJdrU0Fmrmq0Iq +ZEIpgZKYirx5QW8b6Q5tv0YsnXNasXvHZQve4GgF449ewk9wWfYevD8UttHUEe2a +QeEKb2l7NxqyY6trGyVtTRlm4SnoOH/9VodTKUEmS6pds6XFtjRflxgom0TL7CXb +uJ9b6fkXQlnf01FqAbv5HC1sjgGlSZc7Yk8k09nWOR8mZMoHC+U4KEq+oM+m87q4 +U/GsEk8UsPslGIIHHK6W/sdU6zA9bR3QYmkD40Z7FbVfKVvDmKPlwI7NONqysD8V +UTPoB8aE7FeulZhlTxdK2EcW14AsjbFiPQ4zAVxj4bRj39RLgJYL+BvAF6PfRHb1 +Xb7ykbuTvT7VhNYXLlQagR9EyixT3Wu9WCWUc0xJKSATn1s2YBLNM7LO4MkYO9WG +YrejhNHG+54a7rtnnlG04Gs7OhM32baMH/DxT+EEAX4j0Dfww4RaCZcfq1gDPsVe ++RzqsjjqF8+IzE25SK38xgwT/o3n9r5Ele3/zadwy695KCfbkhVFSDAPvhiv8um5 +6NNP+dDymFRXGzV85xSK75ue3Dpj+MoSScmIdGLEcU5EqYcBFLCXGLYPDIW8Lb89 +mG1z7TkZOLIs+6v7kp4rrvyijsyLFZ+EKUmabAK42qdzASZ1o6ETDDfFBETMxjWA +oMmGmRkhsyfBTuCr1ESlTBQHj4vvxBrgXgHtHwUinBw/sofLbkFRZ4wz/cBOtwqW +HIu88/o33l6ywMowcjaoToIbK2a4rD/KFJiwLliGKZG2veiESRhnNUQyjxT/PIef +0gqx3i1eBGWvfQs/wUC8qI5UadTRhjMFCwMCDVycevZE8lcQ+7zi9tVu6mXife5J +yP/jxRNDLzpdM6C6puqk0XieZey782XZ7sPpDpS2tphwakINF/5X3t1qZsssZPqq +F1S2VIsL8qm6Z7HDHXex3o2tDUhc226YSp/T7D+IWP3UCs0NjJrldakhnAd7ykxT +b2cDh09GDYSbji4Y6WmgIbSAurqk6kt4MWrfx4yfEAlp8ujH6788lRDAiXN1RgzC +k8r21IOJONDG7Qk1rS0YUV4XyGz4SEpBdPTI7RM1fl5bDn4e+OslBcfWh5XplZrz +4D4Z9YWVZ8X6d7CiPYZIg35oo/45KGel3Z8algziVkMCAwEAAaNTMFEwHQYDVR0O +BBYEFOWXlO0crUtBejJo87v9lwg8PlE6MB8GA1UdIwQYMBaAFOWXlO0crUtBejJo +87v9lwg8PlE6MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggQBADUQ +YqVmS9o3/5Qp7Tr+JL5ZNRP1fRmV5kAqaKDC5I9ONKRYahHvoI0ojE/j+pmwI4gf +mp5mW8EgsNfooovrzVSHxJgBO9r1ogMlL9AvwlvVxLbexvLXpM/1QjD/7FID/TaK +1q5mhSBKaEYXqQ+8DN25aVsI/bwHx4eP11Ft6YjhPqaX/WutE/IIAMbgASRFtBlO +foTm++fpdn8rCg5LiLLpWrRLC3zUSUtFd7if3wQ4vcDdck09v9PjD5Lk34aYkowJ +oARbVmBMpAxwkMXaThP1fT7xlYPDhAA26UXksT5xUNzFPbmOVReuFT0drhJlF6e6 +SLTjy2BcrYuz5ieBmmY6QboBYH3SzUFKuamvnHLSic3i3u9Ly68XUjOtDKgYB7Y5 +oZtfZT+YFmz/R6eLUcGRRfcmLJ+i/OXjgyKVkYBMDafW3RI9fRp46Yr/lvOv5gFW +Vrn3Tfc9cSbYQgE4vuKXcs4aVVeX8uAyjcucMV3eLdxaBLUAezTpJseRfqtH2kCk +3JIV6m2y6Tm5EhhaSiHKbe6FtPFKhpu7m9AlquUzhBU9Aq59mbKp6jtV0mWhYwKB +K6REmWQqqAOtHIs7UIXDeN1ZByJ7q+et57RvMgMHc5My0d6a+gQAUssH4i73sVTz +Uej57DW9L7hK0GQpzGzGIO/9lYTzWMVa8EZG1Fa5nUgMh3N3Oy6qUQIqr8E8xT2O +IbKKV6Acx6lBiwii4JkruEMgVVEdsDWDVdP8Ov5lJvvIPLWLqnXsZ2sKCyZrVkgc +PTXVtYBLmn7Tuwody2MSaBONSqleJ1oPQJ9lsAKyqX4xpX05ZJu2kNhST2oq2127 +378GS85DqKDM3P187mjU2G8moqWaGKr6byiIr7ea5TkqIzpC3tKW5QRHvX9aanz0 +akQx6F+l3l4L8J0cXaKasUJTaCk3cWPbbVzo8tQwwdxd0/MdJWrmitK85o+4gLqG +Cvn9VA4mnhjRR0XccxEtzmhSxBRWXoCF1+FnfDmXhPji+AmAhVqRwPkqX9T9H+54 
+YG2ZA9Trxssme+QFSFCPZrHuw66ZI6GmKo6h+Hr2qew7LytASN+x2QyvRf7tSNmf +oUgmiD+CFpaH6exjrCC0/hcJ53Kv3E5GBvQskvOqgsUkW+nmsrm95YOosn+9MoQc +PIM6zQCmZ0N/6jHrEHnOnSnz03tGHsvPs6tMB6DKhQz9FNqlrLG7UHhlqhFWj9nv +H+Zh0oOwbcgcoxkk+W6LHLDpA3UpC1tlOzTlD2ektACvQQr/2A/fecpJN/7iWlX9 +BimWwRTS24bO5dX92Kb8V1TNO6ARd9TqOkPXRatysyh7it/MXpc5I2+t49hqlXoV +9Xpi4ds6s2cT8zZGDKI= +-----END CERTIFICATE-----"; diff --git a/core/tls-websocket-server/src/test/fixtures/test_private_key.rs b/core/tls-websocket-server/src/test/fixtures/test_private_key.rs new file mode 100644 index 0000000000..0e3ad60d01 --- /dev/null +++ b/core/tls-websocket-server/src/test/fixtures/test_private_key.rs @@ -0,0 +1,53 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use rustls::{internal::pemfile::rsa_private_keys, PrivateKey}; +use std::io::BufReader; + +pub fn get_test_private_key() -> PrivateKey { + let mut buf_reader = BufReader::new(PRIVATE_KEY_STR.as_bytes()); + rsa_private_keys(&mut buf_reader).unwrap().first().unwrap().clone() +} + +const PRIVATE_KEY_STR: &str = "\ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAmuJ6JwsQKxEvpAQX/ZmaGkNhgTIO+D49r3uESiUiAbfQOxIC +eIRc5xUNdTtdV7293VUBjivqRUcYdsYNAyb1d5Scrabynpr4sk+Skhr6eh8m6pse +ugdld4g3614YckqhubShHnMxQSKvutnQKdPq9KEnaWShk9WA7V6J43G5w2/dyu4P +GqxGnRSmDdzAIwRkN5uX7FiSbquIS1NfnaZgH8ZmB9XZXJ/PS8j4WIdCO0eb9e1a +xpTQliFg9Xpi7TPL00veHjLafEejLjCGUzioAU6yKVOBuRj3Ed35Q88y7AsRLbY4 +zGuNc0rSl84nlCMIq0QMz7+PFeCZZAglD7/e6QIDAQABAoIBAQCEe5i08Nehnw+7 +Ie1LdSnFsUEj+6emW8bz5ZlguqZ+BbbN8DfA0qeM2gsq7d6IALr5KY8tBw9atteM +MRhMS/THloz2VMlPNYvpKftbkkwSTbdCEfGUemMmfZQnddM/X+s6J/FxVGMbLgpW +r51JSgW9vmMx2WwEQioH4EfeDxcwvZi3LF7SAo89eMSiSDqHZaIfMRmS0cSpoXav +u7gKDt7H+zSeYdLC4FhD4f8zRUpZEa4x5GIIm2JHsvIWuy9XKyepakaObJkWWqR1 +ATO94LtM2+RRVUev+yOVDDOfJtDzEqZrbokCHaVBYXgliAV/XkvFox1ZINyeGFq4 +kAvqfiQJAoGBAMhO/tAz2TpWeETMcujBekx1JmtDEUITJroDT0DvFDV5QRKVopxY +ZY5pPbwtk60KknBbsXrswR3Vh1q3xfKLT3Ln4x121ufltIwN7eopY9dXVqh830CU +QymtUz5VcvG3foWCeABcyklpZIdhHyDDDDP46URfFr3NnQiRnx7qb6yPAoGBAMXy +bSGgnBPUOWHtNW4hI5vxiOiCGWvCq7jERixybGMU8+kP6eRWUEAnOdCibq84A6gv +GLO5EW+bmL8l7L797w6ZN9DhbuR7W7hQVwdkyQS8PUgmTfsaba7+9hTC0chl+L38 +A7NlYRju+JS99SqarGA6WMvo30ykiMGwxw8tHOkHAoGAPT6Z/oK72nBx2WdBgxUV +FaeEFaut7Sv53UoBw3LWFPt7//isfW0xr/dRnuW4j2H6IEyI2XLmIP8WoZAq/9vE +cPeho3KghsrfByuDIOOC2Wak4mM7x30NhAKwvxBVUr6t+phHpKS6XPPSfuodIGFC +q+lhOTxxsZradrI/mq5HctUCgYEAqo4bYeIVGTC+0JWmd+Gt4OvYXx3Z8XOmqmjT +XfCpWyXuk13W1ZtZQi2KLy4F2IuW+w65ZgGL+HJExk5TEq2RkS6LXTsgZVW0zbbL +hd9dJOtckhIPFtDKuQGN3o2OW/EgxfGi7qvnYahmHyMdXzwuUitz3x4jaNJL0zgS +DA1+33kCgYA1iAZ58XXJPh6YObvw+kg21dCLLelxp+mCoRBSbY6wq+R6PmKg4a1N +oOc6Rh/1teyBVWJ/KnkXBeh9//XLfhg0r6zHDSCsDKabeM0eoB1AKWlc5f6bWYHV +60JHDgby+V1AElKT2yQT8KVv1hWJH4XQ1/fTQpQDDoo6O+nj1r4q6w== +-----END RSA PRIVATE KEY-----"; diff --git a/core/tls-websocket-server/src/test/fixtures/test_server.rs b/core/tls-websocket-server/src/test/fixtures/test_server.rs new file mode 100644 index 0000000000..194a4ed9b7 --- /dev/null +++ 
b/core/tls-websocket-server/src/test/fixtures/test_server.rs @@ -0,0 +1,41 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{ + test::{ + fixtures::test_server_config_provider::TestServerConfigProvider, + mocks::web_socket_handler_mock::WebSocketHandlerMock, + }, + TungsteniteWsServer, +}; +use std::{collections::VecDeque, string::String, sync::Arc}; + +pub type TestServer = TungsteniteWsServer; + +pub fn create_server( + handler_responses: VecDeque, + port: u16, +) -> (Arc, Arc) { + let config_provider = Arc::new(TestServerConfigProvider {}); + let handler = Arc::new(WebSocketHandlerMock::from_response_sequence(handler_responses)); + + let server_addr_string = format!("127.0.0.1:{}", port); + + let server = + Arc::new(TungsteniteWsServer::new(server_addr_string, config_provider, handler.clone())); + (server, handler) +} diff --git a/core/tls-websocket-server/src/test/fixtures/test_server_config_provider.rs b/core/tls-websocket-server/src/test/fixtures/test_server_config_provider.rs new file mode 100644 index 0000000000..7f267aadf5 --- /dev/null +++ b/core/tls-websocket-server/src/test/fixtures/test_server_config_provider.rs @@ -0,0 +1,43 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{ + config_provider::ProvideServerConfig, + test::fixtures::{ + test_cert::get_test_certificate_chain, test_private_key::get_test_private_key, + }, + WebSocketResult, +}; +use rustls::{NoClientAuth, ServerConfig}; +use std::sync::Arc; + +pub struct TestServerConfigProvider; + +impl ProvideServerConfig for TestServerConfigProvider { + fn get_config(&self) -> WebSocketResult> { + let mut config = rustls::ServerConfig::new(NoClientAuth::new()); + + let certs = get_test_certificate_chain(); + let privkey = get_test_private_key(); + + config + .set_single_cert_with_ocsp_and_sct(certs, privkey, vec![], vec![]) + .unwrap(); + + Ok(Arc::new(config)) + } +} diff --git a/core/tls-websocket-server/src/test/mocks/mod.rs b/core/tls-websocket-server/src/test/mocks/mod.rs new file mode 100644 index 0000000000..fd5dff2b6c --- /dev/null +++ b/core/tls-websocket-server/src/test/mocks/mod.rs @@ -0,0 +1,19 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +pub mod web_socket_connection_mock; +pub mod web_socket_handler_mock; diff --git a/core/tls-websocket-server/src/test/mocks/web_socket_connection_mock.rs b/core/tls-websocket-server/src/test/mocks/web_socket_connection_mock.rs new file mode 100644 index 0000000000..24620c9af2 --- /dev/null +++ b/core/tls-websocket-server/src/test/mocks/web_socket_connection_mock.rs @@ -0,0 +1,103 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{error::WebSocketResult, WebSocketConnection}; +use mio::{Event, Evented, Poll, PollOpt, Ready, Token}; +use std::vec::Vec; +use tungstenite::Message; + +/// Mock implementation of a web socket connection. +#[derive(PartialEq, Eq, Clone)] +pub(crate) struct WebSocketConnectionMock { + pub id: Token, + pub messages_to_read: Vec, + pub messages_written: Vec, + pub is_closed: bool, + socket: SocketMock, +} + +impl WebSocketConnectionMock { + #[allow(unused)] + pub fn new(id: Token) -> Self { + WebSocketConnectionMock { + id, + messages_to_read: Default::default(), + messages_written: Default::default(), + is_closed: false, + socket: SocketMock {}, + } + } + + #[allow(unused)] + pub fn with_messages_to_read(mut self, messages: Vec) -> Self { + self.messages_to_read = messages; + self + } +} + +impl WebSocketConnection for WebSocketConnectionMock { + type Socket = SocketMock; + + fn socket(&self) -> Option<&Self::Socket> { + Some(&self.socket) + } + + fn get_session_readiness(&self) -> Ready { + Ready::readable() + } + + fn on_ready(&mut self, _poll: &mut Poll, _ev: &Event) -> WebSocketResult<()> { + Ok(()) + } + + fn is_closed(&self) -> bool { + self.is_closed + } + + fn token(&self) -> Token { + self.id + } +} + +#[derive(PartialEq, Eq, Clone)] +pub(crate) struct SocketMock; + +impl Evented for SocketMock { + fn register( + &self, + _poll: &Poll, + _token: Token, + _interest: Ready, + _opts: PollOpt, + ) -> std::io::Result<()> { + Ok(()) + } + + fn reregister( + &self, + _poll: &Poll, + _token: Token, + _interest: Ready, + _opts: PollOpt, + ) -> std::io::Result<()> { + Ok(()) + } + + fn deregister(&self, _poll: &Poll) -> std::io::Result<()> { + Ok(()) + } +} diff --git a/core/tls-websocket-server/src/test/mocks/web_socket_handler_mock.rs b/core/tls-websocket-server/src/test/mocks/web_socket_handler_mock.rs new file mode 100644 index 0000000000..b77491f41d --- /dev/null +++ b/core/tls-websocket-server/src/test/mocks/web_socket_handler_mock.rs @@ -0,0 +1,61 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + 
you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +#[cfg(feature = "sgx")] +use std::sync::SgxRwLock as RwLock; + +#[cfg(feature = "std")] +use std::sync::RwLock; + +use crate::{ConnectionToken, WebSocketMessageHandler, WebSocketResult}; +use log::debug; +use std::{collections::VecDeque, string::String, vec::Vec}; + +pub struct WebSocketHandlerMock { + pub responses: RwLock>, + pub messages_handled: RwLock>, +} + +impl WebSocketHandlerMock { + pub fn from_response_sequence(responses: VecDeque) -> Self { + WebSocketHandlerMock { + responses: RwLock::new(responses), + messages_handled: Default::default(), + } + } + + pub fn get_handled_messages(&self) -> Vec<(ConnectionToken, String)> { + self.messages_handled.read().unwrap().clone() + } +} + +impl WebSocketMessageHandler for WebSocketHandlerMock { + fn handle_message( + &self, + connection_token: ConnectionToken, + message: String, + ) -> WebSocketResult> { + let mut handled_messages_lock = self.messages_handled.write().unwrap(); + + debug!("Handling message: {}", message); + handled_messages_lock.push((connection_token, message)); + + let next_response = self.responses.write().unwrap().pop_front(); + + Ok(next_response) + } +} diff --git a/sidechain/top-pool/src/mocks/mod.rs b/core/tls-websocket-server/src/test/mod.rs similarity index 94% rename from sidechain/top-pool/src/mocks/mod.rs rename to core/tls-websocket-server/src/test/mod.rs index 7e9ed7f0aa..0d2c1da1d4 100644 --- a/sidechain/top-pool/src/mocks/mod.rs +++ b/core/tls-websocket-server/src/test/mod.rs @@ -15,4 +15,5 @@ */ -pub mod rpc_responder_mock; +pub mod fixtures; +pub mod mocks; diff --git a/core/tls-websocket-server/src/common.rs b/core/tls-websocket-server/src/tls_common.rs similarity index 98% rename from core/tls-websocket-server/src/common.rs rename to core/tls-websocket-server/src/tls_common.rs index 02d74582a0..6a918505a0 100644 --- a/core/tls-websocket-server/src/common.rs +++ b/core/tls-websocket-server/src/tls_common.rs @@ -18,16 +18,16 @@ #[cfg(all(not(feature = "std"), feature = "sgx"))] use crate::sgx_reexport_prelude::*; -use crate::{WebSocketError, WebSocketResult}; -use rustls::NoClientAuth; -use std::{format, io::BufReader, string::ToString, sync::Arc, vec, vec::Vec}; - #[cfg(feature = "std")] use std::fs; #[cfg(all(not(feature = "std"), feature = "sgx"))] use std::untrusted::fs; +use crate::{error::WebSocketError, WebSocketResult}; +use rustls::NoClientAuth; +use std::{format, io::BufReader, string::ToString, sync::Arc, vec, vec::Vec}; + pub fn make_config(cert: &str, key: &str) -> WebSocketResult> { let mut config = rustls::ServerConfig::new(NoClientAuth::new()); diff --git a/core/tls-websocket-server/src/ws_server.rs b/core/tls-websocket-server/src/ws_server.rs index 3f925ec247..33ae2d6626 100644 --- a/core/tls-websocket-server/src/ws_server.rs +++ b/core/tls-websocket-server/src/ws_server.rs @@ -18,71 +18,410 @@ #[cfg(all(not(feature = "std"), feature = "sgx"))] use crate::sgx_reexport_prelude::*; +#[cfg(feature = "sgx")] +use std::sync::{SgxMutex as Mutex, SgxRwLock as RwLock}; + +#[cfg(feature = "std")] +use std::sync::{Mutex, 
RwLock}; + use crate::{ - common::make_config, connection::TungsteniteWsConnection, WebSocketError, WebSocketHandler, - WebSocketResult, WebSocketServer, + config_provider::ProvideServerConfig, + connection::TungsteniteWsConnection, + connection_id_generator::GenerateConnectionId, + error::{WebSocketError, WebSocketResult}, + ConnectionIdGenerator, ConnectionToken, WebSocketConnection, WebSocketMessageHandler, + WebSocketResponder, WebSocketServer, }; use log::*; +use mio::{ + event::{Event, Evented}, + net::TcpListener, + Poll, +}; +use mio_extras::channel::{channel, Receiver, Sender}; use net::SocketAddr; -use rustls::ServerSession; -use std::{net, net::TcpListener, string::String, sync::Arc}; +use rustls::ServerConfig; +use std::{collections::HashMap, format, net, string::String, sync::Arc}; -/// Secure web-socket server implementation using the tungstenite library -pub struct TungsteniteWsServer { +// Default tokens for the server. +pub(crate) const NEW_CONNECTIONS_LISTENER: mio::Token = mio::Token(0); +pub(crate) const SERVER_SIGNAL_TOKEN: mio::Token = mio::Token(1); + +/// Secure web-socket server implementation using the Tungstenite library. +pub struct TungsteniteWsServer { ws_address: String, - cert_path: String, - private_key_path: String, + config_provider: Arc, + connection_handler: Arc, + id_generator: ConnectionIdGenerator, + connections: RwLock>>, + signal_sender: Mutex>>, } -impl TungsteniteWsServer { - pub fn new(ws_address: String, cert_path: String, private_key_path: String) -> Self { - TungsteniteWsServer { ws_address, cert_path, private_key_path } +impl TungsteniteWsServer +where + ConfigProvider: ProvideServerConfig, + Handler: WebSocketMessageHandler, +{ + pub fn new( + ws_address: String, + config_provider: Arc, + connection_handler: Arc, + ) -> Self { + TungsteniteWsServer { + ws_address, + config_provider, + connection_handler, + id_generator: ConnectionIdGenerator::default(), + connections: Default::default(), + signal_sender: Default::default(), + } + } + + fn accept_connection( + &self, + poll: &mut Poll, + tcp_listener: &TcpListener, + tls_config: Arc, + ) -> WebSocketResult<()> { + let (socket, addr) = tcp_listener.accept()?; + + debug!("Accepting new connection from {:?}", addr); + + let tls_session = rustls::ServerSession::new(&tls_config); + let connection_id = self.id_generator.next_id()?; + let token = mio::Token(connection_id); + trace!("New connection has token {:?}", token); + + let mut web_socket_connection = TungsteniteWsConnection::new( + socket, + tls_session, + token, + self.connection_handler.clone(), + )?; + + trace!("Web-socket connection created"); + web_socket_connection.register(poll)?; + + let mut connections_lock = + self.connections.write().map_err(|_| WebSocketError::LockPoisoning)?; + connections_lock.insert(token, web_socket_connection); + + trace!("Successfully accepted connection"); + Ok(()) + } + + fn connection_event(&self, poll: &mut mio::Poll, event: &Event) -> WebSocketResult<()> { + let token = event.token(); + + let mut connections_lock = + self.connections.write().map_err(|_| WebSocketError::LockPoisoning)?; + + if let Some(connection) = connections_lock.get_mut(&token) { + connection.on_ready(poll, event)?; + + if connection.is_closed() { + trace!("Connection {:?} is closed, removing", token); + connections_lock.remove(&token); + } + } + + Ok(()) + } + + fn handle_server_signal( + &self, + poll: &mut mio::Poll, + event: &Event, + signal_receiver: &mut Receiver, + ) -> WebSocketResult { + let signal = 
signal_receiver.try_recv()?; + + let initiate_shut_down = match signal { + ServerSignal::ShutDown => true, + }; + + signal_receiver.reregister( + poll, + event.token(), + mio::Ready::readable(), + mio::PollOpt::level(), + )?; + + Ok(initiate_shut_down) + } + + fn register_server_signal_sender(&self, sender: Sender) -> WebSocketResult<()> { + let mut sender_lock = + self.signal_sender.lock().map_err(|_| WebSocketError::LockPoisoning)?; + *sender_lock = Some(sender); + Ok(()) } } -impl WebSocketServer for TungsteniteWsServer { - type Connection = TungsteniteWsConnection; +impl WebSocketServer for TungsteniteWsServer +where + ConfigProvider: ProvideServerConfig, + Handler: WebSocketMessageHandler, +{ + type Connection = TungsteniteWsConnection; - fn run(&self, handler: Arc) -> WebSocketResult<()> - where - Handler: WebSocketHandler, - { + fn run(&self) -> WebSocketResult<()> { debug!("Running tungstenite web socket server on {}", self.ws_address); let socket_addr: SocketAddr = self.ws_address.parse().map_err(WebSocketError::InvalidWsAddress)?; - let config = make_config(self.cert_path.as_str(), self.private_key_path.as_str())?; + let config = self.config_provider.get_config()?; + + let (server_signal_sender, mut shutdown_receiver) = channel::(); + self.register_server_signal_sender(server_signal_sender)?; + + let tcp_listener = TcpListener::bind(&socket_addr).map_err(WebSocketError::TcpBindError)?; + let mut poll = Poll::new()?; + poll.register( + &tcp_listener, + NEW_CONNECTIONS_LISTENER, + mio::Ready::readable(), + mio::PollOpt::level(), + )?; + + poll.register( + &shutdown_receiver, + SERVER_SIGNAL_TOKEN, + mio::Ready::readable(), + mio::PollOpt::level(), + )?; + + let mut events = mio::Events::with_capacity(1024); + + // Run the event loop. + 'outer_event_loop: loop { + poll.poll(&mut events, None)?; + + for event in events.iter() { + match event.token() { + NEW_CONNECTIONS_LISTENER => { + trace!("Received new connection event"); + if let Err(e) = + self.accept_connection(&mut poll, &tcp_listener, config.clone()) + { + error!("Failed to accept new web-socket connection: {:?}", e); + } + }, + SERVER_SIGNAL_TOKEN => { + trace!("Received server signal event"); + if self.handle_server_signal(&mut poll, &event, &mut shutdown_receiver)? 
{ + break 'outer_event_loop + } + }, + _ => { + trace!("Connection (token {:?}) activity event", event.token()); + if let Err(e) = self.connection_event(&mut poll, &event) { + error!("Failed to process connection event: {:?}", e); + } + }, + } + } + } + + info!("Web-socket server has shut down"); + Ok(()) + } + + fn shut_down(&self) -> WebSocketResult<()> { + info!("Shutdown request of web-socket server detected, shutting down.."); + match self.signal_sender.lock().map_err(|_| WebSocketError::LockPoisoning)?.as_ref() { + None => { + warn!( + "Signal sender has not been initialized, cannot send web-socket server signal" + ); + }, + Some(signal_sender) => { + signal_sender + .send(ServerSignal::ShutDown) + .map_err(|e| WebSocketError::Other(format!("{:?}", e).into()))?; + }, + } + + Ok(()) + } +} + +impl WebSocketResponder for TungsteniteWsServer +where + ConfigProvider: ProvideServerConfig, + Handler: WebSocketMessageHandler, +{ + fn send_message( + &self, + connection_token: ConnectionToken, + message: String, + ) -> WebSocketResult<()> { + let mut connections_lock = + self.connections.write().map_err(|_| WebSocketError::LockPoisoning)?; + let connection = connections_lock + .get_mut(&connection_token.into()) + .ok_or(WebSocketError::InvalidConnection(connection_token.0))?; + connection.write_message(message) + } +} + +/// Internal server signal enum. +enum ServerSignal { + ShutDown, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test::{ + fixtures::{no_cert_verifier::NoCertVerifier, test_server::create_server}, + mocks::web_socket_handler_mock::WebSocketHandlerMock, + }; + use alloc::collections::VecDeque; + use rustls::ClientConfig; + use std::{net::TcpStream, thread, time::Duration}; + use tungstenite::{ + client_tls_with_config, stream::MaybeTlsStream, Connector, Message, WebSocket, + }; + use url::Url; + + #[test] + fn server_handles_multiple_connections() { + let _ = env_logger::builder().is_test(true).try_init(); + + let expected_answer = "websocket server response bidibibup".to_string(); + let port: u16 = 21777; + const NUMBER_OF_CONNECTIONS: usize = 20; + + let responses: VecDeque<_> = + (0..NUMBER_OF_CONNECTIONS).map(|_| expected_answer.clone()).collect(); + let (server, handler) = create_server(responses, port); + + let server_clone = server.clone(); + let server_join_handle = thread::spawn(move || server_clone.run()); + + // Wait until server is up. + thread::sleep(std::time::Duration::from_millis(50)); + + // Spawn multiple clients that connect to the server simultaneously and send a message. 
+ let client_handles: Vec<_> = (0..NUMBER_OF_CONNECTIONS) + .map(|_| { + let expected_answer_clone = expected_answer.clone(); + + thread::spawn(move || { + let mut socket = connect_tls_client(get_server_addr(port).as_str()); + socket + .write_message(Message::Text("Hello WebSocket".into())) + .expect("client write message to be successful"); + + assert_eq!( + Message::Text(expected_answer_clone), + socket.read_message().unwrap() + ); + }) + }) + .collect(); + + for handle in client_handles.into_iter() { + handle.join().expect("client handle to be joined"); + } + + server.shut_down().unwrap(); + + let server_shutdown_result = + server_join_handle.join().expect("Couldn't join on the associated thread"); + if let Err(e) = server_shutdown_result { + panic!("Test failed, web-socket returned error: {:?}", e); + } + + assert_eq!(NUMBER_OF_CONNECTIONS, handler.get_handled_messages().len()); + } + + #[test] + fn server_sends_update_message_to_client() { + let _ = env_logger::builder().is_test(true).try_init(); - let listener = TcpListener::bind(&socket_addr).map_err(WebSocketError::TcpBindError)?; + let expected_answer = "first response".to_string(); + let port: u16 = 21778; + let (server, handler) = create_server(VecDeque::from([expected_answer.clone()]), port); + let server_clone = server.clone(); + let server_join_handle = thread::spawn(move || server_clone.run()); + + // Wait until server is up. + thread::sleep(std::time::Duration::from_millis(50)); + + let update_message = "Message update".to_string(); + let update_message_clone = update_message.clone(); + + let client_join_handle = thread::spawn(move || { + let mut socket = connect_tls_client(get_server_addr(port).as_str()); + socket + .write_message(Message::Text("First request".into())) + .expect("client write message to be successful"); + + assert_eq!(Message::Text(expected_answer), socket.read_message().unwrap()); + assert_eq!(Message::Text(update_message_clone), socket.read_message().unwrap()); + }); + + let connection_token = poll_handler_for_first_connection(handler.as_ref()); + + // Send reply to a wrong connection token, should fail. + assert!(server + .send_message( + ConnectionToken(connection_token.0 + 1), + "wont get to the client".to_string() + ) + .is_err()); + + // Send reply to the correct connection token. + server.send_message(connection_token, update_message).unwrap(); + + client_join_handle.join().unwrap(); + server.shut_down().unwrap(); + server_join_handle.join().unwrap().unwrap(); + + assert_eq!(1, handler.get_handled_messages().len()); + } + + // Ignored because it does not directly test any of our own components. + // It was used to test the behavior of the tungstenite client configuration with certificates. 
+ #[test] + #[ignore] + fn client_test() { + let mut socket = connect_tls_client("ws.ifelse.io:443"); + + socket + .write_message(Message::Text("Hello WebSocket".into())) + .expect("client write message to be successful"); + } + + fn poll_handler_for_first_connection(handler: &WebSocketHandlerMock) -> ConnectionToken { loop { - let stream_result = listener.accept(); - - match stream_result { - Ok((stream, _)) => { - let cloned_config = config.clone(); - - let server_session = ServerSession::new(&cloned_config); - - let connection = match TungsteniteWsConnection::connect(stream, server_session) - { - Ok(c) => c, - Err(e) => { - error!("failed to establish web-socket connection: {:?}", e); - continue - }, - }; - - // continue serving requests, even if there is an error in handling a specific connection - if let Err(handler_error) = handler.handle(connection) { - error!("web-socket request failed: {:?}", handler_error); - } - }, - Err(e) => { - warn!("failed to establish web-socket connection ({:?})", e) - }, + match handler.get_handled_messages().first() { + None => thread::sleep(Duration::from_millis(5)), + Some(m) => return m.0, } } } + + fn get_server_addr(port: u16) -> String { + format!("localhost:{}", port) + } + + fn connect_tls_client(server_addr: &str) -> WebSocket> { + let ws_server_url = Url::parse(format!("wss://{}", server_addr).as_str()).unwrap(); + + let mut config = ClientConfig::new(); + config.dangerous().set_certificate_verifier(Arc::new(NoCertVerifier {})); + let connector = Connector::Rustls(Arc::new(config)); + let stream = TcpStream::connect(server_addr).unwrap(); + + let (socket, _response) = + client_tls_with_config(ws_server_url, stream, None, Some(connector)) + .expect("Can't connect"); + + socket + } } diff --git a/enclave-runtime/Cargo.lock b/enclave-runtime/Cargo.lock index 34640f2356..25982b13dd 100644 --- a/enclave-runtime/Cargo.lock +++ b/enclave-runtime/Cargo.lock @@ -9,31 +9,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" dependencies = [ "lazy_static", - "regex 1.5.4", + "regex 1.5.5", ] [[package]] name = "ac-compose-macros" version = "0.1.0" -source = "git+https://github.com/scs/substrate-api-client?branch=master#7b6b822e2826e384346a9ade9585a6f96f4951ca" +source = "git+https://github.com/scs/substrate-api-client?branch=polkadot-v0.9.19#7f3912223c7cd209bddeecd0ba8fa4ff074af65d" dependencies = [ "ac-primitives", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "ac-primitives" version = "0.1.0" -source = "git+https://github.com/scs/substrate-api-client?branch=master#7b6b822e2826e384346a9ade9585a6f96f4951ca" +source = "git+https://github.com/scs/substrate-api-client?branch=polkadot-v0.9.19#7f3912223c7cd209bddeecd0ba8fa4ff074af65d" dependencies = [ "hex", "parity-scale-codec", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] @@ -74,7 +74,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ "getrandom 0.2.3", - "once_cell 1.9.0", + "once_cell 1.10.0", "version_check", ] @@ -93,7 +93,18 @@ version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" dependencies = [ - "memchr 2.4.1", + "memchr 2.5.0", +] + +[[package]] +name = "ajuna-common" +version = "0.1.0" +source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=validateer-setup#97f5586b0e7be4468883d1a27f30eda6e6c0cbfe" +dependencies = [ + "frame-support", + "parity-scale-codec", + "scale-info", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] @@ -125,15 +136,18 @@ checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" [[package]] name = "autocfg" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "base-x" @@ -184,9 +198,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitvec" -version = "0.20.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" +checksum = "1489fcb93a5bb47da0462ca93ad252ad6af2145cce58d10d46a83931ba9f016b" dependencies = [ "funty", "radium", @@ -194,6 +208,15 @@ dependencies = [ "wyz", ] +[[package]] +name = "blake2" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9cf849ee05b2ee5fba5e36f97ff8ec2533916700fc0758d40d92136a42f3388" +dependencies = [ + "digest 0.10.3", +] + [[package]] name = "blake2-rfc" version = "0.2.18" @@ -250,9 +273,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d36a02058e76b040de25a4464ba1c80935655595b661505c8b39b664828b95" +checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" dependencies = [ "generic-array 0.14.5", ] @@ -280,9 +303,9 @@ checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" [[package]] name = "byte-slice-cast" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d30c751592b77c499e7bce34d99d67c2c11bdc0574e9a488ddade14150a4698" +checksum = "87c5fdd0166095e1d463fc6cc01aa8ce547ad77a4e84d42eb6762b084e28067e" [[package]] name = "byte-tools" @@ -314,9 +337,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.72" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" [[package]] name = "cfg-if" @@ -346,8 +369,8 @@ version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" dependencies = [ - "num-integer 0.1.44", - "num-traits 0.2.14", + "num-integer 0.1.45", + "num-traits 0.2.15", ] [[package]] @@ -383,9 +406,9 @@ checksum = 
"6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "cpufeatures" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" dependencies = [ "libc", ] @@ -398,21 +421,12 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-common" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d6b536309245c849479fba3da410962a43ed8e51c26b729208ec0ac2798d0" -dependencies = [ - "generic-array 0.14.5", -] - -[[package]] -name = "crypto-mac" -version = "0.8.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" dependencies = [ "generic-array 0.14.5", - "subtle", + "typenum", ] [[package]] @@ -455,9 +469,9 @@ checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", "proc-macro2", - "quote 1.0.15", + "quote 1.0.18", "rustc_version 0.4.0", - "syn 1.0.86", + "syn 1.0.92", ] [[package]] @@ -480,20 +494,20 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b697d66081d42af4fba142d56918a3cb21dc8eb63372c6b85d14f44fb9c5979b" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ - "block-buffer 0.10.0", + "block-buffer 0.10.2", "crypto-common", - "generic-array 0.14.5", + "subtle", ] [[package]] name = "ed25519" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74e1069e39f1454367eb2de793ed062fac4c35c2934b76a81d90dd9abcd28816" +checksum = "3d5c4b5e5959dc2c2b89918d8e2cc40fcdd623cef026ed09d2f0ee05199dc8e4" dependencies = [ "signature", ] @@ -551,10 +565,11 @@ dependencies = [ "itp-stf-executor", "itp-stf-state-handler", "itp-storage", - "itp-storage-verifier", "itp-teerex-storage", "itp-test", "itp-time-utils", + "itp-top-pool", + "itp-top-pool-author", "itp-types", "its-sidechain", "jsonrpc-core", @@ -568,7 +583,6 @@ dependencies = [ "retain_mut", "rust-base58", "rustls 0.19.0 (git+https://github.com/mesalock-linux/rustls?rev=sgx_1.1.3)", - "sc-utils", "serde 1.0.118 (git+https://github.com/mesalock-linux/serde-sgx?tag=sgx_1.1.3)", "serde_derive 1.0.118", "serde_json 1.0.60 (git+https://github.com/mesalock-linux/serde-json-sgx?tag=sgx_1.1.3)", @@ -590,7 +604,7 @@ dependencies = [ "sp-finality-grandpa", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-version", "substrate-api-client", "webpki", @@ -624,13 +638,13 @@ checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" [[package]] name = "finality-grandpa" -version = "0.14.4" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8ac3ff5224ef91f3c97e03eb1de2db82743427e91aaa5ac635f454f0b164f5a" +checksum = "d9def033d8505edf199f6a5d07aa7e6d2d6185b164293b77f0efd108f4f3e11d" dependencies = [ "either", - "futures 0.3.19", - "num-traits 0.2.14", + "futures 0.3.21", + "num-traits 0.2.15", "parity-scale-codec", "scale-info", ] @@ -652,16 
+666,10 @@ dependencies = [ "hashbrown 0.3.1", ] -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - [[package]] name = "frame-executive" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -670,37 +678,28 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-tracing", ] [[package]] name = "frame-metadata" -version = "14.2.0" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ed5e5c346de62ca5c184b4325a6600d1eaca210666e4606fe4e449574978d0" +checksum = "df6bb8542ef006ef0de09a5c4420787d79823c0ed7924225822362fd2bf2ff2d" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", "scale-info", ] -[[package]] -name = "frame-metadata" -version = "14.2.0" -source = "git+https://github.com/paritytech/frame-metadata.git?branch=main#f0c7151a950f9e1002bf40d260ec32032f76d7ed" -dependencies = [ - "cfg-if 1.0.0", - "parity-scale-codec", -] - [[package]] name = "frame-support" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "bitflags", - "frame-metadata 14.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "frame-metadata", "frame-support-procedural", "impl-trait-for-tuples", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", @@ -715,7 +714,7 @@ dependencies = [ "sp-io", "sp-runtime", "sp-staking", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-tracing", "tt-call", ] @@ -723,41 +722,41 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "Inflector", "frame-support-procedural-tools", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "frame-support-procedural-tools" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "frame-support-procedural-tools-derive" version = "3.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = 
"frame-system" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", @@ -766,14 +765,14 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-version", ] [[package]] name = "frame-system-rpc-runtime-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "sp-api", @@ -781,9 +780,9 @@ dependencies = [ [[package]] name = "funty" -version = "1.1.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" @@ -792,7 +791,7 @@ source = "git+https://github.com/mesalock-linux/futures-rs-sgx#d54882f24ddf7d613 dependencies = [ "futures-channel 0.3.8", "futures-core 0.3.8", - "futures-executor 0.3.8", + "futures-executor", "futures-io 0.3.8", "futures-sink 0.3.8", "futures-task 0.3.8", @@ -802,17 +801,16 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" +checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" dependencies = [ - "futures-channel 0.3.19", - "futures-core 0.3.19", - "futures-executor 0.3.19", - "futures-io 0.3.19", - "futures-sink 0.3.19", - "futures-task 0.3.19", - "futures-util 0.3.19", + "futures-channel 0.3.21", + "futures-core 0.3.21", + "futures-io 0.3.21", + "futures-sink 0.3.21", + "futures-task 0.3.21", + "futures-util 0.3.21", ] [[package]] @@ -827,12 +825,12 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ - "futures-core 0.3.19", - "futures-sink 0.3.19", + "futures-core 0.3.21", + "futures-sink 0.3.21", ] [[package]] @@ -845,9 +843,9 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" @@ -860,17 +858,6 @@ dependencies = [ "sgx_tstd", ] -[[package]] -name = "futures-executor" -version = "0.3.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" -dependencies = [ - "futures-core 0.3.19", - "futures-task 0.3.19", - "futures-util 0.3.19", -] - [[package]] name = "futures-io" version = "0.3.8" @@ -881,9 +868,9 @@ dependencies = [ [[package]] name 
= "futures-io" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-macro" @@ -892,19 +879,8 @@ source = "git+https://github.com/mesalock-linux/futures-rs-sgx#d54882f24ddf7d613 dependencies = [ "proc-macro-hack", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", -] - -[[package]] -name = "futures-macro" -version = "0.3.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" -dependencies = [ - "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] @@ -914,9 +890,9 @@ source = "git+https://github.com/mesalock-linux/futures-rs-sgx#d54882f24ddf7d613 [[package]] name = "futures-sink" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" +checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" @@ -929,15 +905,9 @@ dependencies = [ [[package]] name = "futures-task" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" - -[[package]] -name = "futures-timer" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-util" @@ -947,7 +917,7 @@ dependencies = [ "futures-channel 0.3.8", "futures-core 0.3.8", "futures-io 0.3.8", - "futures-macro 0.3.8", + "futures-macro", "futures-sink 0.3.8", "futures-task 0.3.8", "memchr 2.2.1", @@ -961,20 +931,15 @@ dependencies = [ [[package]] name = "futures-util" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ - "futures-channel 0.3.19", - "futures-core 0.3.19", - "futures-io 0.3.19", - "futures-macro 0.3.19", - "futures-sink 0.3.19", - "futures-task 0.3.19", - "memchr 2.4.1", + "futures-core 0.3.21", + "futures-sink 0.3.21", + "futures-task 0.3.21", "pin-project-lite", "pin-utils", - "slab 0.4.5", ] [[package]] @@ -1041,18 +1006,9 @@ checksum = "29fba9abe4742d586dfd0c06ae4f7e73a1c2d86b856933509b269d82cdf06e18" [[package]] name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash", -] - -[[package]] -name = "hashbrown" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c21d40587b92fa6a6c6e3c1bdbf87d75511db5672f9c93175574b3a00df1758" +checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" dependencies = [ "ahash", ] @@ -1060,7 +1016,7 @@ dependencies = [ [[package]] name = "hashbrown_tstd" version = "0.11.2" -source = 
"git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" [[package]] name = "hex" @@ -1068,34 +1024,13 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac", - "digest 0.9.0", -] - -[[package]] -name = "hmac-drbg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" -dependencies = [ - "digest 0.9.0", - "generic-array 0.14.5", - "hmac", -] - [[package]] name = "http" version = "0.2.1" source = "git+https://github.com/integritee-network/http-sgx?branch=sgx-experimental#307b5421fb7a489a114bede0dc05c8d32b804f49" dependencies = [ "bytes", - "fnv 1.0.6", + "fnv", "itoa 0.4.5", "sgx_tstd", ] @@ -1130,9 +1065,9 @@ dependencies = [ [[package]] name = "impl-codec" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" dependencies = [ "parity-scale-codec", ] @@ -1143,27 +1078,18 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" dependencies = [ - "serde 1.0.136", + "serde 1.0.137", ] [[package]] name = "impl-trait-for-tuples" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5dacb10c5b3bb92d46ba347505a9041e676bb20ad220101326bffb0c93031ee" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if 1.0.0", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] @@ -1172,7 +1098,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770" dependencies = [ - "num-traits 0.2.14", + "num-traits 0.2.15", ] [[package]] @@ -1230,7 +1156,7 @@ dependencies = [ "jsonrpc-core", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", - "serde_json 1.0.78", + "serde_json 1.0.81", "sgx_tstd", "sgx_types", "sp-runtime", @@ -1273,7 +1199,6 @@ dependencies = [ "itp-settings", "itp-stf-executor", "itp-stf-state-handler", - "itp-storage-verifier", "itp-types", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "pallet-ajuna-gameregistry", @@ -1322,7 +1247,7 @@ dependencies = [ "itp-types", "lazy_static", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", - "num-traits 0.2.14", + "num-traits 0.2.15", "parity-scale-codec", "sgx_tstd", "sgx_types", @@ -1340,6 +1265,7 @@ version = "0.8.0" dependencies = [ "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "mio", 
+ "mio-extras", "rustls 0.19.0 (git+https://github.com/mesalock-linux/rustls?branch=mesalock_sgx)", "sgx_tstd", "sgx_types", @@ -1387,6 +1313,7 @@ name = "itp-component-container" version = "0.8.0" dependencies = [ "sgx_tstd", + "thiserror 1.0.9", ] [[package]] @@ -1429,11 +1356,14 @@ dependencies = [ name = "itp-ocall-api" version = "0.8.0" dependencies = [ + "derive_more", + "itp-storage", "itp-types", "parity-scale-codec", "sgx_types", + "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] @@ -1454,7 +1384,7 @@ dependencies = [ "itp-types", "pallet-ajuna-gameregistry", "parity-scale-codec", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] @@ -1497,7 +1427,6 @@ dependencies = [ "itp-registry-storage", "itp-stf-state-handler", "itp-storage", - "itp-storage-verifier", "itp-test", "itp-time-utils", "itp-types", @@ -1520,6 +1449,7 @@ dependencies = [ "itp-settings", "itp-sgx-crypto", "itp-sgx-io", + "itp-time-utils", "itp-types", "lazy_static", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", @@ -1538,74 +1468,105 @@ name = "itp-storage" version = "0.8.0" dependencies = [ "derive_more", - "frame-metadata 14.2.0 (git+https://github.com/paritytech/frame-metadata.git?branch=main)", + "frame-metadata", "frame-support", "hash-db", "parity-scale-codec", "sgx_tstd", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-trie", "thiserror 1.0.9", ] [[package]] -name = "itp-storage-verifier" +name = "itp-teerex-storage" +version = "0.8.0" +dependencies = [ + "itp-storage", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", +] + +[[package]] +name = "itp-test" version = "0.8.0" dependencies = [ "derive_more", + "ita-stf", "itp-ocall-api", + "itp-sgx-crypto", + "itp-stf-state-handler", "itp-storage", + "itp-teerex-storage", + "itp-time-utils", "itp-types", + "jsonrpc-core", "parity-scale-codec", + "sgx-externalities", + "sgx_tcrypto_helper", "sgx_tstd", "sgx_types", "sp-core", "sp-runtime", - "sp-std", - "thiserror 1.0.9", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] -name = "itp-teerex-storage" +name = "itp-time-utils" version = "0.8.0" dependencies = [ - "itp-storage", - "sp-std", + "sgx_tstd", ] [[package]] -name = "itp-test" +name = "itp-top-pool" version = "0.8.0" dependencies = [ + "byteorder 1.4.3", "derive_more", "ita-stf", - "itp-ocall-api", - "itp-sgx-crypto", - "itp-stf-state-handler", - "itp-storage", - "itp-storage-verifier", - "itp-teerex-storage", - "itp-time-utils", + "itc-direct-rpc-server", "itp-types", - "its-top-pool", + "its-primitives", "jsonrpc-core", + "linked-hash-map", + "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", - "sgx-externalities", - "sgx_tcrypto_helper", + "retain_mut", + "serde 1.0.137", "sgx_tstd", "sgx_types", + "sp-application-crypto", "sp-core", "sp-runtime", - "sp-std", + "thiserror 1.0.9", ] [[package]] -name = "itp-time-utils" +name = "itp-top-pool-author" version = "0.8.0" dependencies = [ + "derive_more", + "ita-stf", + "itc-direct-rpc-server", + "itc-tls-websocket-server", + "itp-enclave-metrics", + "itp-ocall-api", + "itp-sgx-crypto", + "itp-stf-state-handler", + "itp-test", + "itp-top-pool", + "itp-types", + "jsonrpc-core", + "log 0.4.14 
(git+https://github.com/mesalock-linux/log-sgx)", + "parity-scale-codec", + "sgx_tcrypto_helper", "sgx_tstd", + "sgx_types", + "sp-core", + "sp-runtime", + "thiserror 1.0.9", ] [[package]] @@ -1617,12 +1578,12 @@ dependencies = [ "pallet-ajuna-gameregistry", "parity-scale-codec", "primitive-types", - "serde 1.0.136", - "serde_json 1.0.78", + "serde 1.0.137", + "serde_json 1.0.81", "sgx_tstd", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "substrate-api-client", ] @@ -1635,10 +1596,10 @@ dependencies = [ "itp-sgx-crypto", "itp-stf-executor", "itp-time-utils", + "itp-top-pool-author", "itp-types", "its-primitives", "its-state", - "its-top-pool-rpc-author", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", "sgx-externalities", @@ -1665,7 +1626,6 @@ dependencies = [ "itp-sgx-crypto", "itp-stf-executor", "itp-stf-state-handler", - "itp-storage-verifier", "itp-time-utils", "itp-types", "its-block-composer", @@ -1726,16 +1686,16 @@ dependencies = [ "parity-scale-codec", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "its-rpc-handler" version = "0.8.0" dependencies = [ + "itp-top-pool-author", "itp-types", "its-primitives", - "its-top-pool-rpc-author", "jsonrpc-core", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", @@ -1756,9 +1716,7 @@ dependencies = [ "its-primitives", "its-rpc-handler", "its-state", - "its-top-pool", "its-top-pool-executor", - "its-top-pool-rpc-author", "its-validateer-fetch", ] @@ -1771,36 +1729,12 @@ dependencies = [ "its-primitives", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", - "serde 1.0.136", + "serde 1.0.137", "sgx-externalities", "sgx_tstd", "sp-core", "sp-io", - "sp-std", - "thiserror 1.0.9", -] - -[[package]] -name = "its-top-pool" -version = "0.8.0" -dependencies = [ - "byteorder 1.4.3", - "derive_more", - "ita-stf", - "itc-direct-rpc-server", - "itp-types", - "its-primitives", - "jsonrpc-core", - "linked-hash-map", - "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", - "parity-scale-codec", - "retain_mut", - "serde 1.0.136", - "sgx_tstd", - "sgx_types", - "sp-application-crypto", - "sp-core", - "sp-runtime", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "thiserror 1.0.9", ] @@ -1811,10 +1745,10 @@ dependencies = [ "ita-stf", "itp-stf-executor", "itp-time-utils", + "itp-top-pool-author", "itp-types", "its-primitives", "its-state", - "its-top-pool-rpc-author", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", "sgx-externalities", @@ -1825,33 +1759,6 @@ dependencies = [ "thiserror 1.0.9", ] -[[package]] -name = "its-top-pool-rpc-author" -version = "0.8.0" -dependencies = [ - "derive_more", - "ita-stf", - "itc-direct-rpc-server", - "itc-tls-websocket-server", - "itp-component-container", - "itp-enclave-metrics", - "itp-ocall-api", - "itp-sgx-crypto", - "itp-stf-state-handler", - "itp-test", - "itp-types", - "its-top-pool", - "jsonrpc-core", - "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", - "parity-scale-codec", - "sgx_tcrypto_helper", - "sgx_tstd", - "sgx_types", - "sp-core", - "sp-runtime", - "thiserror 1.0.9", -] - [[package]] name = "its-validateer-fetch" version = "0.8.0" @@ -1860,14 +1767,13 @@ dependencies = [ "frame-support", "itp-ocall-api", "itp-storage", - 
"itp-storage-verifier", "itp-teerex-storage", "itp-types", "parity-scale-codec", "sp-core", "sp-runtime", - "sp-std", - "thiserror 1.0.30", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", + "thiserror 1.0.31", ] [[package]] @@ -1880,7 +1786,7 @@ dependencies = [ "serde 1.0.118 (git+https://github.com/mesalock-linux/serde-sgx)", "serde_derive 1.0.118", "serde_json 1.0.60 (git+https://github.com/mesalock-linux/serde-json-sgx)", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=master)", ] [[package]] @@ -1898,11 +1804,17 @@ dependencies = [ "spin", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" -version = "0.2.117" +version = "0.2.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e74d72e0f9b65b5b4ca49a346af3976df0f9c61d550727f349ecd559f251a26c" +checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b" [[package]] name = "libsecp256k1" @@ -1913,14 +1825,11 @@ dependencies = [ "arrayref", "base64 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "digest 0.9.0", - "hmac-drbg", "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", - "rand 0.8.4", - "serde 1.0.136", - "sha2 0.9.9", - "typenum", + "rand 0.8.5", + "serde 1.0.137", ] [[package]] @@ -1960,15 +1869,6 @@ dependencies = [ "sgx_tstd", ] -[[package]] -name = "lock_api" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" -dependencies = [ - "scopeguard", -] - [[package]] name = "log" version = "0.4.14" @@ -1987,6 +1887,15 @@ dependencies = [ "sgx_tstd", ] +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "matches" version = "0.1.8" @@ -2004,18 +1913,18 @@ dependencies = [ [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memory-db" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d505169b746dacf02f7d14d8c80b34edfd8212159c63d23c977739a0d960c626" +checksum = "6566c70c1016f525ced45d7b7f97730a2bafb037c788211d0c186ef5b2189f0a" dependencies = [ "hash-db", - "hashbrown 0.11.2", + "hashbrown 0.12.1", "parity-util-mem", ] @@ -2045,6 +1954,19 @@ dependencies = [ "slab 0.4.2", ] +[[package]] +name = "mio-extras" +version = "2.0.6" +source = "git+https://github.com/integritee-network/mio-extras-sgx?rev=963234b#963234bf55e44f9efff921938255126c48deef3a" +dependencies = [ + "lazycell", + "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", + "mio", + "sgx_tstd", + "sgx_types", + "slab 0.4.6", +] + [[package]] name = "multibase" version = "0.8.0" @@ -2065,7 +1987,7 @@ dependencies = [ "digest 0.9.0", "sha-1", "sha2 0.9.9", - "sha3", + "sha3 0.9.1", "unsigned-varint", ] @@ -2103,7 +2025,7 @@ name = "num-bigint" version = "0.2.5" source = 
"git+https://github.com/mesalock-linux/num-bigint-sgx#76a5bed94dc31c32bd1670dbf72877abcf9bbc09" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", "num-integer 0.1.41", "num-traits 0.2.10", "sgx_tstd", @@ -2114,7 +2036,7 @@ name = "num-complex" version = "0.2.3" source = "git+https://github.com/mesalock-linux/num-complex-sgx#19700ad6de079ebc5560db472c282d1591e0d84f" dependencies = [ - "autocfg 0.1.7", + "autocfg 0.1.8", "num-traits 0.2.10", "sgx_tstd", ] @@ -2124,19 +2046,19 @@ name = "num-integer" version = "0.1.41" source = "git+https://github.com/mesalock-linux/num-integer-sgx#404c50e5378ca635261688b080dee328ff42b6bd" dependencies = [ - "autocfg 0.1.7", + "autocfg 0.1.8", "num-traits 0.2.10", "sgx_tstd", ] [[package]] name = "num-integer" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ - "autocfg 1.0.1", - "num-traits 0.2.14", + "autocfg 1.1.0", + "num-traits 0.2.15", ] [[package]] @@ -2154,7 +2076,7 @@ name = "num-rational" version = "0.2.2" source = "git+https://github.com/mesalock-linux/num-rational-sgx#be65f9ce439f3c9ec850d8041635ab6c3309b816" dependencies = [ - "autocfg 0.1.7", + "autocfg 0.1.8", "num-bigint", "num-integer 0.1.41", "num-traits 0.2.10", @@ -2166,17 +2088,17 @@ name = "num-traits" version = "0.2.10" source = "git+https://github.com/mesalock-linux/num-traits-sgx#af046e0b15c594c960007418097dd4ff37ec3f7a" dependencies = [ - "autocfg 0.1.7", + "autocfg 0.1.8", "sgx_tstd", ] [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ - "autocfg 1.0.1", + "autocfg 1.1.0", ] [[package]] @@ -2198,9 +2120,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" +checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9" [[package]] name = "opaque-debug" @@ -2216,49 +2138,53 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "pallet-ajuna-connectfour" -version = "4.0.0-dev" -source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=update-substrate-5#c32d74bee111ccb561e48d437ebff67539187357" +version = "0.1.0" +source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=validateer-setup#97f5586b0e7be4468883d1a27f30eda6e6c0cbfe" dependencies = [ + "ajuna-common", "frame-support", "frame-system", "pallet-ajuna-matchmaker", "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-ajuna-gameregistry" -version = "4.0.0-dev" -source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=update-substrate-5#c32d74bee111ccb561e48d437ebff67539187357" +version = "0.1.0" +source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=validateer-setup#97f5586b0e7be4468883d1a27f30eda6e6c0cbfe" dependencies = [ + "ajuna-common", "frame-support", "frame-system", + "log 0.4.17", "pallet-ajuna-matchmaker", 
"parity-scale-codec", "scale-info", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-ajuna-matchmaker" -version = "4.0.0-dev" -source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=update-substrate-5#c32d74bee111ccb561e48d437ebff67539187357" +version = "0.1.0" +source = "git+https://github.com/ajuna-network/ajuna-node.git?branch=validateer-setup#97f5586b0e7be4468883d1a27f30eda6e6c0cbfe" dependencies = [ + "ajuna-common", "frame-support", "frame-system", "parity-scale-codec", "scale-info", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-aura" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2268,13 +2194,13 @@ dependencies = [ "sp-application-crypto", "sp-consensus-aura", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-authorship" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2283,13 +2209,13 @@ dependencies = [ "scale-info", "sp-authorship", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-balances" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2297,13 +2223,13 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-grandpa" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2319,13 +2245,13 @@ dependencies = [ "sp-runtime", "sp-session", "sp-staking", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-parentchain" version = "0.9.0" -source = "git+https://github.com/integritee-network/pallets.git?branch=master#817fe7570330f64cfe3d35edef34d51201e5aa58" +source = "git+https://github.com/integritee-network/pallets.git?branch=polkadot-v0.9.19#cac00b08a420043bd09197ba6b6356ee9b542e33" dependencies = [ "frame-support", "frame-system", @@ -2335,13 +2261,13 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-randomness-collective-flip" version = "4.0.0-dev" -source = 
"git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2349,13 +2275,13 @@ dependencies = [ "safe-mix", "scale-info", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-scheduler" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2364,13 +2290,13 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-session" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2384,13 +2310,13 @@ dependencies = [ "sp-runtime", "sp-session", "sp-staking", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-sudo" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2398,13 +2324,13 @@ dependencies = [ "scale-info", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2413,14 +2339,14 @@ dependencies = [ "scale-info", "sp-inherents", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-timestamp", ] [[package]] name = "pallet-transaction-payment" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "frame-support", "frame-system", @@ -2430,13 +2356,13 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "pallet-transaction-payment", 
"parity-scale-codec", @@ -2446,38 +2372,38 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "2.3.1" +version = "3.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" +checksum = "e8b44461635bbb1a0300f100a841e571e7d919c81c73075ef5d152ffdb521066" dependencies = [ "arrayvec 0.7.2", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", - "serde 1.0.136", + "serde 1.0.137", ] [[package]] name = "parity-scale-codec-derive" -version = "2.3.1" +version = "3.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" +checksum = "c45ed1f39709f5a89338fab50e59816b2e8815f5bb58276e7ddf9afd495f73f8" dependencies = [ "proc-macro-crate", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "parity-util-mem" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f4cb4e169446179cbc6b8b6320cc9fca49bd2e94e8db25f25f200a8ea774770" +checksum = "c32561d248d352148124f036cac253a644685a21dc9fea383eb4907d7bd35a8f" dependencies = [ "cfg-if 1.0.0", - "hashbrown 0.11.2", + "hashbrown 0.12.1", "impl-trait-for-tuples", "parity-util-mem-derive", "primitive-types", @@ -2491,40 +2417,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" dependencies = [ "proc-macro2", - "syn 1.0.86", + "syn 1.0.92", "synstructure", ] -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - "redox_syscall", - "smallvec 1.8.0", - "winapi", -] - [[package]] name = "paste" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5" +checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" [[package]] name = "percent-encoding" @@ -2533,9 +2434,9 @@ source = "git+https://github.com/mesalock-linux/rust-url-sgx?tag=sgx_1.1.3#23832 [[package]] name = "pin-project-lite" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" @@ -2550,7 +2451,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a25c0b0ae06fcffe600ad392aabfa535696c8973f2253d9ac83171924c58a858" dependencies = [ "postcard-cobs", - "serde 1.0.136", + "serde 1.0.137", ] [[package]] @@ -2572,9 +2473,9 @@ checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" [[package]] name = "primitive-types" -version = "0.10.1" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +checksum = "e28720988bff275df1f51b171e1b2a18c30d194c4d2b61defdacecd625a5d94a" dependencies = [ "fixed-hash", "impl-codec", @@ -2585,11 +2486,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.1.0" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ebace6889caf889b4d3f76becee12e90353f2b8c7d875534a71e5742f8f6f83" +checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" dependencies = [ - "thiserror 1.0.30", + "thiserror 1.0.31", "toml", ] @@ -2607,25 +2508,11 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" [[package]] name = "proc-macro2" -version = "1.0.36" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +checksum = "9027b48e9d4c9175fa2218adf3557f91c1137021739951d4932f5f8268ac48aa" dependencies = [ - "unicode-xid 0.2.2", -] - -[[package]] -name = "prometheus" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7f64969ffd5dd8f39bd57a68ac53c163a095ed9d0fb707146da1b27025a3504" -dependencies = [ - "cfg-if 1.0.0", - "fnv 1.0.7", - "lazy_static", - "memchr 2.4.1", - "parking_lot", - "thiserror 1.0.30", + "unicode-xid 0.2.3", ] [[package]] @@ -2650,18 +2537,18 @@ checksum = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" [[package]] name = "quote" -version = "1.0.15" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" dependencies = [ "proc-macro2", ] [[package]] name = "radium" -version = "0.6.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" [[package]] name = "rand" @@ -2687,9 +2574,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "rand_core 0.6.3", ] @@ -2744,33 +2631,24 @@ dependencies = [ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "redox_syscall" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" -dependencies = [ - "bitflags", -] - [[package]] name = "ref-cast" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "300f2a835d808734ee295d45007adacb9ebb29dd3ae2424acfa17930cae541da" +checksum = "685d58625b6c2b83e4cc88a27c4bf65adb7b6b16dbdc413e515c9405b47432ab" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c38e3aecd2b21cb3959637b883bb3714bc7e43f0268b9a29d3743ee3e55cdd2" +checksum = "a043824e29c94169374ac5183ac0ed43f5724dc4556b19568007486bd840fa1f" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 
1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] @@ -2787,12 +2665,12 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.4" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" dependencies = [ "aho-corasick 0.7.18", - "memchr 2.4.1", + "memchr 2.5.0", "regex-syntax 0.6.25", ] @@ -2812,9 +2690,9 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" [[package]] name = "retain_mut" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51dd4445360338dab5116712bee1388dc727991d51969558a8882ab552e6db30" +checksum = "8c31b5c4033f8fdde8700e4657be2c497e7288f01515be52168c631e2e4d4086" [[package]] name = "ring" @@ -2851,7 +2729,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.4", + "semver 1.0.9", ] [[package]] @@ -2908,23 +2786,11 @@ dependencies = [ "rustc_version 0.2.3", ] -[[package]] -name = "sc-utils" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" -dependencies = [ - "futures 0.3.19", - "futures-timer", - "lazy_static", - "parking_lot", - "prometheus", -] - [[package]] name = "scale-info" -version = "1.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c55b744399c25532d63a0d2789b109df8d46fc93752d46b0782991a931a782f" +checksum = "8980cafbe98a7ee7a9cc16b32ebce542c77883f512d83fbf2ddc8f6a85ea74c9" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -2934,14 +2800,14 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "1.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baeb2780690380592f86205aa4ee49815feb2acad8c2f59e6dd207148c3f1fcd" +checksum = "4260c630e8a8a33429d1688eff2f163f24c65a4e1b1578ef6b565061336e4b6f" dependencies = [ "proc-macro-crate", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] @@ -2960,12 +2826,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - [[package]] name = "sct" version = "0.6.0" @@ -2976,6 +2836,24 @@ dependencies = [ "untrusted", ] +[[package]] +name = "secp256k1" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + [[package]] name = "secrecy" version = "0.8.0" @@ -2996,9 +2874,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.4" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" +checksum = "8cb243bdfdb5936c8dc3c45762a19d12ab4550cdc753bc247637d4ec35a040fd" [[package]] name = "semver-parser" 
@@ -3025,11 +2903,11 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.136" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" dependencies = [ - "serde_derive 1.0.136", + "serde_derive 1.0.137", ] [[package]] @@ -3047,19 +2925,19 @@ version = "1.0.118" source = "git+https://github.com/mesalock-linux/serde-sgx#db0226f1d5d70fca6b96af2c285851502204e21c" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "serde_derive" -version = "1.0.136" +version = "1.0.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] @@ -3086,26 +2964,26 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.78" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085" +checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" dependencies = [ "itoa 1.0.1", "ryu", - "serde 1.0.136", + "serde 1.0.137", ] [[package]] name = "sgx-externalities" version = "0.4.0" -source = "git+https://github.com/ajuna-network/sgx-runtime?branch=master#5063174549c1464e5a5e08ba0a8e88c837888326" +source = "git+https://github.com/ajuna-network/sgx-runtime?branch=master#3458223c117826f99daeef0a4c05214a3188adf4" dependencies = [ "derive_more", "environmental", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", "postcard", - "serde 1.0.136", + "serde 1.0.137", "sgx_tstd", "sgx_types", ] @@ -3113,7 +2991,7 @@ dependencies = [ [[package]] name = "sgx-runtime" version = "0.8.0" -source = "git+https://github.com/ajuna-network/sgx-runtime?branch=master#5063174549c1464e5a5e08ba0a8e88c837888326" +source = "git+https://github.com/ajuna-network/sgx-runtime?branch=master#3458223c117826f99daeef0a4c05214a3188adf4" dependencies = [ "frame-executive", "frame-support", @@ -3140,7 +3018,7 @@ dependencies = [ "sp-offchain", "sp-runtime", "sp-session", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-transaction-pool", "sp-version", ] @@ -3148,12 +3026,12 @@ dependencies = [ [[package]] name = "sgx_alloc" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" [[package]] name = "sgx_backtrace_sys" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "cc", "sgx_build_helper", @@ -3163,12 +3041,12 @@ dependencies = [ [[package]] name = "sgx_build_helper" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = 
"git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" [[package]] name = "sgx_crypto_helper" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "itertools", "serde 1.0.118 (git+https://github.com/mesalock-linux/serde-sgx)", @@ -3182,12 +3060,12 @@ dependencies = [ [[package]] name = "sgx_demangle" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" [[package]] name = "sgx_libc" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_types", ] @@ -3195,7 +3073,7 @@ dependencies = [ [[package]] name = "sgx_rand" version = "1.1.4" -source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_trts", "sgx_tstd", @@ -3205,7 +3083,7 @@ dependencies = [ [[package]] name = "sgx_serialize" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_tstd", ] @@ -3213,7 +3091,7 @@ dependencies = [ [[package]] name = "sgx_serialize_derive" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "quote 0.3.15", "sgx_serialize_derive_internals", @@ -3223,7 +3101,7 @@ dependencies = [ [[package]] name = "sgx_serialize_derive_internals" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "syn 0.11.11", ] @@ -3231,7 +3109,7 @@ dependencies = [ [[package]] name = "sgx_tcrypto" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_types", ] @@ -3239,7 +3117,7 @@ dependencies = [ [[package]] name = "sgx_tcrypto_helper" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_crypto_helper", ] @@ -3247,7 +3125,7 @@ dependencies = [ [[package]] name = 
"sgx_tprotected_fs" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_trts", "sgx_types", @@ -3256,7 +3134,7 @@ dependencies = [ [[package]] name = "sgx_trts" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_libc", "sgx_types", @@ -3265,7 +3143,7 @@ dependencies = [ [[package]] name = "sgx_tse" version = "1.1.4" -source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_types", ] @@ -3273,7 +3151,7 @@ dependencies = [ [[package]] name = "sgx_tseal" version = "1.1.4" -source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_tcrypto", "sgx_trts", @@ -3284,7 +3162,7 @@ dependencies = [ [[package]] name = "sgx_tstd" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "hashbrown_tstd", "sgx_alloc", @@ -3300,7 +3178,7 @@ dependencies = [ [[package]] name = "sgx_tunittest" version = "1.1.4" -source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/apache/teaclave-sgx-sdk.git?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_tstd", ] @@ -3308,12 +3186,12 @@ dependencies = [ [[package]] name = "sgx_types" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" [[package]] name = "sgx_unwind" version = "1.1.4" -source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#565960cd7b4b36d1188459d75652619971c43f7e" +source = "git+https://github.com/haerdib/incubator-teaclave-sgx-sdk?branch=master#08264d6bff679d6047e5e9bc36058b4475c58ed4" dependencies = [ "sgx_build_helper", ] @@ -3366,13 +3244,13 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99c3bd8169c58782adad9290a9af5939994036b76187f7b4f0e6de91dbbfc0ec" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.1", + "digest 0.10.3", ] [[package]] @@ -3387,6 +3265,16 @@ dependencies = [ "opaque-debug 0.3.0", ] +[[package]] +name = "sha3" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "881bf8156c87b6301fc5ca6b27f11eeb2761224c7081e69b409d5a1951a70c86" +dependencies 
= [ + "digest 0.10.3", + "keccak", +] + [[package]] name = "signature" version = "1.5.0" @@ -3403,9 +3291,9 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" +checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" [[package]] name = "smallvec" @@ -3424,82 +3312,82 @@ checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" [[package]] name = "sp-api" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "parity-scale-codec", "sp-api-proc-macro", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-version", ] [[package]] name = "sp-api-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ - "blake2-rfc", + "blake2", "proc-macro-crate", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "sp-application-crypto" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "scale-info", "sp-core", "sp-io", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-arithmetic" -version = "4.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "5.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "integer-sqrt", - "num-traits 0.2.14", + "num-traits 0.2.15", "parity-scale-codec", "scale-info", "sp-debug-derive", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "static_assertions", ] [[package]] name = "sp-authorship" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "sp-inherents", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-block-builder" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "sp-api", "sp-inherents", "sp-runtime", - "sp-std", + "sp-std 4.0.0 
(git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-consensus-aura" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "scale-info", @@ -3508,25 +3396,27 @@ dependencies = [ "sp-consensus-slots", "sp-inherents", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-timestamp", ] [[package]] name = "sp-consensus-slots" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "scale-info", "sp-arithmetic", "sp-runtime", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", + "sp-timestamp", ] [[package]] name = "sp-core" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "bitflags", "blake2-rfc", @@ -3538,63 +3428,62 @@ dependencies = [ "libsecp256k1", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "merlin", - "num-traits 0.2.14", + "num-traits 0.2.15", "parity-scale-codec", "parity-util-mem", "primitive-types", "scale-info", "schnorrkel", + "secp256k1", "secrecy", - "sha2 0.10.1", "sp-core-hashing", "sp-debug-derive", "sp-runtime-interface", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-storage", "ss58-registry", - "tiny-keccak", - "twox-hash", "zeroize", ] [[package]] name = "sp-core-hashing" version = "4.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ - "blake2-rfc", + "blake2", "byteorder 1.4.3", - "sha2 0.10.1", - "sp-std", - "tiny-keccak", + "digest 0.10.3", + "sha2 0.10.2", + "sha3 0.10.1", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "twox-hash", ] [[package]] name = "sp-core-hashing-proc-macro" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "5.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "proc-macro2", - "quote 1.0.15", + "quote 1.0.18", "sp-core-hashing", - "syn 1.0.86", + "syn 1.0.92", ] [[package]] name = "sp-debug-derive" version = "4.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "sp-finality-grandpa" version = "4.0.0-dev" -source = 
"git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "finality-grandpa", "parity-scale-codec", @@ -3603,24 +3492,24 @@ dependencies = [ "sp-application-crypto", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-inherents" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "sp-core", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-io" -version = "5.0.0" -source = "git+https://github.com/ajuna-network/sgx-runtime?branch=master#5063174549c1464e5a5e08ba0a8e88c837888326" +version = "6.0.0" +source = "git+https://github.com/ajuna-network/sgx-runtime?branch=master#3458223c117826f99daeef0a4c05214a3188adf4" dependencies = [ "environmental", "hash-db", @@ -3632,7 +3521,7 @@ dependencies = [ "sgx_types", "sp-core", "sp-runtime-interface", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-tracing", "sp-wasm-interface", "tracing", @@ -3642,7 +3531,7 @@ dependencies = [ [[package]] name = "sp-offchain" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "sp-api", "sp-core", @@ -3651,8 +3540,8 @@ dependencies = [ [[package]] name = "sp-runtime" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "either", "hash256-std-hasher", @@ -3666,19 +3555,19 @@ dependencies = [ "sp-arithmetic", "sp-core", "sp-io", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-runtime-interface" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "primitive-types", "sp-runtime-interface-proc-macro", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-storage", "sp-tracing", "sp-wasm-interface", @@ -3687,75 +3576,80 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" -version = "4.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "5.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "Inflector", "proc-macro-crate", "proc-macro2", - "quote 1.0.15", - "syn 
1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "sp-session" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "scale-info", "sp-api", "sp-core", "sp-staking", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-staking" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-std" version = "4.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=master#f9f0b08273c41beabf5ad5f3d6262d96c947b8d9" + +[[package]] +name = "sp-std" +version = "4.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" [[package]] name = "sp-storage" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "ref-cast", "sp-debug-derive", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-timestamp" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "sp-api", "sp-inherents", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] name = "sp-tracing" -version = "4.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "5.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "tracing", "tracing-core", ] @@ -3763,7 +3657,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "sp-api", "sp-runtime", @@ -3771,51 +3665,51 @@ dependencies = [ [[package]] name = "sp-trie" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = 
"git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "hash-db", "memory-db", "parity-scale-codec", "scale-info", "sp-core", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "trie-db", "trie-root", ] [[package]] name = "sp-version" -version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "5.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "scale-info", "sp-core-hashing-proc-macro", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", "sp-version-proc-macro", ] [[package]] name = "sp-version-proc-macro" version = "4.0.0-dev" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "parity-scale-codec", "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "sp-wasm-interface" -version = "5.0.0" -source = "git+https://github.com/paritytech/substrate.git?branch=master#59649dd117969467d8046df86afe56810f596545" +version = "6.0.0" +source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19#174735ea1bb5fc4513519c45181d8df63d86f613" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] @@ -3826,16 +3720,16 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "ss58-registry" -version = "1.12.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8319f44e20b42e5c11b88b1ad4130c35fe2974665a007b08b02322070177136a" +checksum = "7b84a70894df7a73666e0694f44b41a9571625e9546fb58a0818a565d2c7e084" dependencies = [ "Inflector", "proc-macro2", - "quote 1.0.15", - "serde 1.0.136", - "serde_json 1.0.78", - "unicode-xid 0.2.2", + "quote 1.0.18", + "serde 1.0.137", + "serde_json 1.0.81", + "unicode-xid 0.2.3", ] [[package]] @@ -3847,18 +3741,18 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "substrate-api-client" version = "0.6.0" -source = "git+https://github.com/scs/substrate-api-client?branch=master#7b6b822e2826e384346a9ade9585a6f96f4951ca" +source = "git+https://github.com/scs/substrate-api-client?branch=polkadot-v0.9.19#7f3912223c7cd209bddeecd0ba8fa4ff074af65d" dependencies = [ "ac-compose-macros", "ac-primitives", - "frame-metadata 14.2.0 (git+https://github.com/paritytech/frame-metadata.git?branch=main)", + "frame-metadata", "frame-support", "hex", "parity-scale-codec", "sp-application-crypto", "sp-core", "sp-runtime", - "sp-std", + "sp-std 4.0.0 (git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.19)", ] [[package]] @@ -3880,13 +3774,13 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.86" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +checksum = "7ff7c592601f11445996a06f8ad0c27f094a58857c2f89e97974ab9235b92c52" dependencies 
= [ "proc-macro2", - "quote 1.0.15", - "unicode-xid 0.2.2", + "quote 1.0.18", + "unicode-xid 0.2.3", ] [[package]] @@ -3905,9 +3799,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", - "unicode-xid 0.2.2", + "quote 1.0.18", + "syn 1.0.92", + "unicode-xid 0.2.3", ] [[package]] @@ -3935,11 +3829,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" dependencies = [ - "thiserror-impl 1.0.30", + "thiserror-impl 1.0.31", ] [[package]] @@ -3948,19 +3842,19 @@ version = "1.0.9" source = "git+https://github.com/mesalock-linux/thiserror-sgx?tag=sgx_1.1.3#c2f806b88616e06aab0af770366a76885d974fdc" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] name = "thiserror-impl" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", ] [[package]] @@ -3983,18 +3877,18 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ - "serde 1.0.136", + "serde 1.0.137", ] [[package]] name = "tracing" -version = "0.1.30" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d8d93354fe2a8e50d5953f5ae2e47a3fc2ef03292e7ea46e3cc38f549525fb9" +checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09" dependencies = [ "cfg-if 1.0.0", "pin-project-lite", @@ -4003,9 +3897,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.22" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" +checksum = "f54c8ca710e81886d498c2fd3331b56c93aa248d49de2222ad2742247c60072f" [[package]] name = "trie-db" @@ -4014,7 +3908,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d32d034c0d3db64b43c31de38e945f15b40cd4ca6d2dcfc26d4798ce8de4ab83" dependencies = [ "hash-db", - "hashbrown 0.12.0", + "hashbrown 0.12.1", "log 0.4.14 (git+https://github.com/mesalock-linux/log-sgx)", "smallvec 1.8.0", ] @@ -4058,11 +3952,12 @@ dependencies = [ [[package]] name = "twox-hash" -version = "1.6.2" +version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee73e6e4924fe940354b8d4d98cad5231175d615cd855b758adc658c0aac6a0" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", + "digest 0.10.3", "static_assertions", ] @@ -4109,9 +4004,9 @@ checksum = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" [[package]] name = "unicode-xid" -version = "0.2.2" +version = "0.2.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" [[package]] name = "unsigned-varint" @@ -4202,9 +4097,12 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "wyz" -version = "0.2.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" +checksum = "30b31594f29d27036c383b53b59ed3476874d518f0efb151b27a4c275141390e" +dependencies = [ + "tap", +] [[package]] name = "yasna" @@ -4219,21 +4117,21 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.5.2" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c88870063c39ee00ec285a2f8d6a966e5b6fb2becc4e8dac77ed0d370ed6006" +checksum = "94693807d016b2f2d2e14420eb3bfcca689311ff775dcf113d74ea624b7cdf07" dependencies = [ "zeroize_derive", ] [[package]] name = "zeroize_derive" -version = "1.3.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81e8f13fef10b63c06356d65d416b070798ddabcadc10d3ece0c5be9b3c7eddb" +checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17" dependencies = [ "proc-macro2", - "quote 1.0.15", - "syn 1.0.86", + "quote 1.0.18", + "syn 1.0.92", "synstructure", ] diff --git a/enclave-runtime/Cargo.toml b/enclave-runtime/Cargo.toml index fa34949b74..ccf2427b69 100644 --- a/enclave-runtime/Cargo.toml +++ b/enclave-runtime/Cargo.toml @@ -24,7 +24,7 @@ test = [ "itp-stf-state-handler/test", "itp-storage/test", "itp-test/sgx", - "its-sidechain/test", + "itp-top-pool-author/test", ] [target.'cfg(not(target_env = "sgx"))'.dependencies] @@ -41,8 +41,8 @@ sgx_tunittest = { branch = "master", git = "https://github.com/apache/teaclave-s sgx-crypto-helper = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", package = "sgx_tcrypto_helper" } [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -primitive-types = { version = "0.10.1", default-features = false, features = ["codec", "serde_no_std"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } +primitive-types = { version = "0.11.1", default-features = false, features = ["codec", "serde_no_std"] } lazy_static = { version = "1.1.0", features = ["spin_no_std"] } retain_mut = { version = "0.1.2" } derive_more = { version = "0.99.5" } @@ -53,10 +53,10 @@ ipfs-unixfs = { default-features = false, git = "https://github.com/whalelephant # scs / integritee jsonrpc-core = { default-features = false, git = "https://github.com/scs/jsonrpc", branch = "no_std" } -sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", features = ["sgx"] } +sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", features = ["sgx"] } sgx-runtime = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master"} -sp-io = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", features = ["disable_oom", "disable_panic_handler", "disable_allocator", "sgx"], optional = true } -substrate-api-client = { default-features = 
false, git = "https://github.com/scs/substrate-api-client", branch = "master" } +sp-io = { default-features = false, features = ["disable_oom", "disable_panic_handler", "disable_allocator", "sgx"], git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", optional = true } +substrate-api-client = { default-features = false, git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } # mesalock linked-hash-map = { git = "https://github.com/mesalock-linux/linked-hash-map-sgx" } @@ -98,24 +98,24 @@ itp-time-utils = { path = "../core-primitives/time-utils", default-features = fa itp-settings = { path = "../core-primitives/settings" } itp-sgx-io = { path = "../core-primitives/sgx/io", default-features = false, features = ["sgx"] } itp-storage = { path = "../core-primitives/storage", default-features = false, features = ["sgx"] } -itp-storage-verifier = { path = "../core-primitives/storage-verified", default-features = false, features = ["sgx"] } itp-sgx-crypto = { path = "../core-primitives/sgx/crypto", default-features = false, features = ["sgx"] } itp-stf-executor = { path = "../core-primitives/stf-executor", default-features = false, features = ["sgx"] } itp-stf-state-handler = { path = "../core-primitives/stf-state-handler", default-features = false, features = ["sgx"] } itp-teerex-storage = { path = "../core-primitives/teerex-storage", default-features = false } itp-test = { path = "../core-primitives/test", default-features = false, optional = true } +itp-top-pool = { path = "../core-primitives/top-pool", default-features = false, features = ["sgx"] } +itp-top-pool-author = { path = "../core-primitives/top-pool-author", default-features = false, features = ["sgx"] } itp-types = { path = "../core-primitives/types", default-features = false, features = ["sgx"] } its-sidechain = { path = "../sidechain/sidechain-crate", default-features = false, features = ["sgx"] } # substrate deps -frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sc-utils = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-version = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-application-crypto = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master" } +frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = 
"polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-version = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-application-crypto = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } [patch.crates-io] log = { git = "https://github.com/mesalock-linux/log-sgx" } @@ -129,10 +129,10 @@ sp-io = { git = "https://github.com/ajuna-network/sgx-runtime", branch = "master #sp-io = { path = "../../ajuna-sgx-runtime/substrate-sgx/sp-io"} -#[patch."https://github.com/ajuna-network/sgx-runtime"] -#sgx-runtime = { path = "../../ajuna-sgx-runtime/runtime", default-features = false } -#sp-io = { path = "../../ajuna-sgx-runtime/substrate-sgx/sp-io", default-features = false, features = ["disable_oom", "disable_panic_handler", "disable_allocator", "sgx"] } -#sgx-externalities = { path = "../../ajuna-sgx-runtime/substrate-sgx/externalities" } +# [patch."https://github.com/ajuna-network/sgx-runtime"] +# sgx-runtime = { path = "../../sgx-runtime/runtime", default-features = false } +# sp-io = { path = "../../sgx-runtime/substrate-sgx/sp-io", default-features = false, features = ["disable_oom", "disable_panic_handler", "disable_allocator", "sgx"] } +# sgx-externalities = { path = "../../sgx-runtime/substrate-sgx/externalities" } [patch."https://github.com/apache/teaclave-sgx-sdk.git"] sgx_tstd = { version = "1.1.4", git = "https://github.com/haerdib/incubator-teaclave-sgx-sdk", branch = "master" } @@ -148,4 +148,7 @@ sgx_tcrypto_helper = { version = "1.1.4", git = "https://github.com/haerdib/incu sgx_crypto_helper = { version = "1.1.4", git = "https://github.com/haerdib/incubator-teaclave-sgx-sdk", branch = "master" } #[patch."https://github.com/scs/substrate-api-client"] -#substrate-api-client = { path = "../../substrate-api-client" } +#substrate-api-client = { path = "../../../scs/substrate-api-client" } + +#[patch."https://github.com/integritee-network/pallets.git"] +#pallet-parentchain = { path = '../../pallets/parentchain' } diff --git a/enclave-runtime/Enclave.edl b/enclave-runtime/Enclave.edl index ed466974f4..f712094d13 100644 --- a/enclave-runtime/Enclave.edl +++ b/enclave-runtime/Enclave.edl @@ -43,6 +43,8 @@ enclave { [out, size=value_size] uint8_t* value, uint32_t value_size ); + public sgx_status_t init_enclave_sidechain_components(); + public sgx_status_t init_direct_invocation_server( [in, size=server_addr_size] uint8_t* server_addr, uint32_t server_addr_size ); @@ -54,6 +56,10 @@ enclave { [out, size=latest_header_size] uint8_t* latest_header, size_t latest_header_size ); + public sgx_status_t init_shard( + [in, size=shard_size] uint8_t* shard, uint32_t shard_size + ); + public sgx_status_t trigger_parentchain_block_import(); public sgx_status_t execute_trusted_getters(); diff --git a/enclave-runtime/src/attestation.rs b/enclave-runtime/src/attestation.rs index 09a7020547..b68c140d4c 100644 --- a/enclave-runtime/src/attestation.rs +++ b/enclave-runtime/src/attestation.rs @@ -41,7 +41,7 @@ use itp_settings::{ node::{REGISTER_ENCLAVE, RUNTIME_SPEC_VERSION, RUNTIME_TRANSACTION_VERSION, TEEREX_MODULE}, }; use itp_sgx_crypto::Ed25519Seal; -use itp_sgx_io::SealedIO; +use 
itp_sgx_io::StaticSealedIO; use log::*; use sgx_rand::*; use sgx_tcrypto::*; @@ -449,7 +449,7 @@ pub fn create_ra_report_and_signature( ocall_api: &A, skip_ra: bool, ) -> EnclaveResult<(Vec, Vec)> { - let chain_signer = Ed25519Seal::unseal()?; + let chain_signer = Ed25519Seal::unseal_from_static_file()?; info!("[Enclave Attestation] Ed25519 pub raw : {:?}", chain_signer.public().0); info!(" [Enclave] Generate keypair"); @@ -520,7 +520,7 @@ pub unsafe extern "C" fn perform_ra( let url_slice = slice::from_raw_parts(w_url, w_url_size as usize); let extrinsic_slice = slice::from_raw_parts_mut(unchecked_extrinsic, unchecked_extrinsic_size as usize); - let signer = match Ed25519Seal::unseal() { + let signer = match Ed25519Seal::unseal_from_static_file() { Ok(pair) => pair, Err(e) => return e.into(), }; diff --git a/enclave-runtime/src/error.rs b/enclave-runtime/src/error.rs index f5e03a3713..583f6737e4 100644 --- a/enclave-runtime/src/error.rs +++ b/enclave-runtime/src/error.rs @@ -23,11 +23,11 @@ pub type Result = StdResult; #[derive(Debug, Display, From)] pub enum Error { - Rpc(its_sidechain::top_pool_rpc_author::error::Error), + TopPoolAuthor(itp_top_pool_author::error::Error), Codec(codec::Error), - ComponentNotInitialized, + ComponentContainer(itp_component_container::error::Error), Crypto(itp_sgx_crypto::Error), - ChainStorage(itp_storage_verifier::Error), + ChainStorage(itp_ocall_api::Error), ExtrinsicsFactory(itp_extrinsics_factory::error::Error), IO(std::io::Error), LightClient(itc_parentchain::light_client::error::Error), diff --git a/enclave-runtime/src/global_components.rs b/enclave-runtime/src/global_components.rs index b3ad917a55..4905a3e788 100644 --- a/enclave-runtime/src/global_components.rs +++ b/enclave-runtime/src/global_components.rs @@ -20,22 +20,39 @@ //! This allows the crates themselves to stay as generic as possible //! and ensures that the global instances are initialized once. 
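// A minimal usage sketch of the ComponentContainer pattern described in the module doc above,
// assuming the itp-component-container API as it appears elsewhere in this patch (a named
// `ComponentContainer::new(..)` static, `initialize(..)` from `ComponentInitializer`, and a
// fallible `get()` from `ComponentGetter`). `MyService` and the two helper functions are
// placeholders for illustration only, not part of the code base.
use std::sync::Arc;

use itp_component_container::{ComponentContainer, ComponentGetter, ComponentInitializer};

struct MyService;

// One global, named container per component; created empty and filled exactly once at init time.
static MY_SERVICE_COMPONENT: ComponentContainer<MyService> =
	ComponentContainer::new("my service");

fn init_my_service() {
	// Called once during enclave initialization.
	MY_SERVICE_COMPONENT.initialize(Arc::new(MyService));
}

fn use_my_service() -> Result<(), itp_component_container::error::Error> {
	// Any later call site retrieves the shared instance; `get()` fails if the
	// component was never initialized.
	let _service: Arc<MyService> = MY_SERVICE_COMPONENT.get()?;
	Ok(())
}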
-use crate::ocall::OcallApi; +use crate::{ocall::OcallApi, rpc::rpc_response_channel::RpcResponseChannel}; +use ita_stf::{Hash, State as StfState}; +use itc_direct_rpc_server::{ + rpc_connection_registry::ConnectionRegistry, rpc_responder::RpcResponder, + rpc_watch_extractor::RpcWatchExtractor, rpc_ws_handler::RpcWsHandler, +}; use itc_parentchain::{ block_import_dispatcher::triggered_dispatcher::TriggeredDispatcher, block_importer::ParentchainBlockImporter, indirect_calls_executor::IndirectCallsExecutor, light_client::ValidatorAccessor, }; +use itc_tls_websocket_server::{ + config_provider::FromFileConfigProvider, ws_server::TungsteniteWsServer, ConnectionToken, +}; use itp_block_import_queue::BlockImportQueue; use itp_component_container::ComponentContainer; use itp_extrinsics_factory::ExtrinsicsFactory; use itp_nonce_cache::NonceCache; -use itp_sgx_crypto::Aes; +use itp_sgx_crypto::{Aes, AesSeal}; use itp_stf_executor::executor::StfExecutor; -use itp_stf_state_handler::GlobalFileStateHandler; +use itp_stf_state_handler::{ + file_io::sgx::SgxStateFileIo, state_key_repository::StateKeyRepository, + state_snapshot_repository::StateSnapshotRepository, StateHandler, +}; +use itp_top_pool::basic_pool::BasicPool; +use itp_top_pool_author::{ + api::SidechainApi, + author::{Author, AuthorTopFilter}, +}; use itp_types::{Block as ParentchainBlock, SignedBlock as SignedParentchainBlock}; use its_sidechain::{ aura::block_importer::BlockImporter as SidechainBlockImporter, + block_composer::BlockComposer, consensus_common::{BlockImportQueueWorker, PeerBlockSync}, primitives::{ traits::SignedBlock as SignedSidechainBlockTrait, @@ -43,51 +60,63 @@ use its_sidechain::{ }, state::SidechainDB, top_pool_executor::TopPoolOperationHandler, - top_pool_rpc_author::{ - author::{Author, AuthorTopFilter}, - pool_types::BPool, - }, }; +use primitive_types::H256; use sgx_crypto_helper::rsa3072::Rsa3072KeyPair; use sgx_externalities::SgxExternalities; use sp_core::ed25519::Pair; -pub type EnclaveStfExecutor = StfExecutor; +pub type EnclaveStateKeyRepository = StateKeyRepository; +pub type EnclaveStateFileIo = SgxStateFileIo; +pub type EnclaveStateSnapshotRepository = + StateSnapshotRepository; +pub type EnclaveStateHandler = StateHandler; +pub type EnclaveOCallApi = OcallApi; +pub type EnclaveStfExecutor = StfExecutor; pub type EnclaveExtrinsicsFactory = ExtrinsicsFactory; pub type EnclaveIndirectCallsExecutor = IndirectCallsExecutor; pub type EnclaveValidatorAccessor = ValidatorAccessor; -pub type StateHandler = GlobalFileStateHandler; pub type EnclaveParentChainBlockImporter = ParentchainBlockImporter< ParentchainBlock, EnclaveValidatorAccessor, - OcallApi, + EnclaveOCallApi, EnclaveStfExecutor, EnclaveExtrinsicsFactory, EnclaveIndirectCallsExecutor, - StateHandler, + EnclaveStateHandler, >; pub type EnclaveParentchainBlockImportQueue = BlockImportQueue; pub type EnclaveParentchainBlockImportDispatcher = TriggeredDispatcher; +pub type EnclaveRpcConnectionRegistry = ConnectionRegistry; +pub type EnclaveRpcWsHandler = + RpcWsHandler, EnclaveRpcConnectionRegistry, Hash>; +pub type EnclaveWebSocketServer = TungsteniteWsServer; +pub type EnclaveRpcResponder = RpcResponder; +pub type EnclaveSidechainApi = SidechainApi; + /// Sidechain types pub type EnclaveSidechainState = SidechainDB<::Block, SgxExternalities>; -pub type EnclaveRpcAuthor = - Author; +pub type EnclaveTopPool = BasicPool; +pub type EnclaveTopPoolAuthor = + Author; pub type EnclaveTopPoolOperationHandler = TopPoolOperationHandler< ParentchainBlock, 
SignedSidechainBlock, - EnclaveRpcAuthor, + EnclaveTopPoolAuthor, EnclaveStfExecutor, >; +pub type EnclaveSidechainBlockComposer = + BlockComposer; pub type EnclaveSidechainBlockImporter = SidechainBlockImporter< Pair, ParentchainBlock, SignedSidechainBlock, - OcallApi, + EnclaveOCallApi, EnclaveSidechainState, - GlobalFileStateHandler, + EnclaveStateHandler, Aes, EnclaveTopPoolOperationHandler, EnclaveParentchainBlockImportDispatcher, @@ -95,8 +124,12 @@ pub type EnclaveSidechainBlockImporter = SidechainBlockImporter< ValidatorAccessor, >; pub type EnclaveSidechainBlockImportQueue = BlockImportQueue; -pub type EnclaveSidechainBlockSyncer = - PeerBlockSync; +pub type EnclaveSidechainBlockSyncer = PeerBlockSync< + ParentchainBlock, + SignedSidechainBlock, + EnclaveSidechainBlockImporter, + EnclaveOCallApi, +>; pub type EnclaveSidechainBlockImportQueueWorker = BlockImportQueueWorker< ParentchainBlock, SignedSidechainBlock, @@ -104,31 +137,73 @@ pub type EnclaveSidechainBlockImportQueueWorker = BlockImportQueueWorker< EnclaveSidechainBlockSyncer, >; -/// Global component instances +/// Base component instances +///------------------------------------------------------------------------------------------------- + +/// State key repository +pub static GLOBAL_STATE_KEY_REPOSITORY_COMPONENT: ComponentContainer = + ComponentContainer::new("State key repository"); + +/// STF executor. +pub static GLOBAL_STF_EXECUTOR_COMPONENT: ComponentContainer = + ComponentContainer::new("STF executor"); + +/// O-Call API +pub static GLOBAL_OCALL_API_COMPONENT: ComponentContainer = + ComponentContainer::new("O-call API"); + +/// Trusted Web-socket server +pub static GLOBAL_WEB_SOCKET_SERVER_COMPONENT: ComponentContainer = + ComponentContainer::new("Web-socket server"); + +/// State handler. +pub static GLOBAL_STATE_HANDLER_COMPONENT: ComponentContainer = + ComponentContainer::new("state handler"); + +/// TOP pool author. +pub static GLOBAL_TOP_POOL_AUTHOR_COMPONENT: ComponentContainer = + ComponentContainer::new("top_pool_author"); + +/// Parentchain component instances +///------------------------------------------------------------------------------------------------- /// Parentchain import dispatcher. pub static GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT: ComponentContainer< EnclaveParentchainBlockImportDispatcher, -> = ComponentContainer::new(); +> = ComponentContainer::new("parentchain import dispatcher"); + +/// Extrinsics factory. +pub static GLOBAL_EXTRINSICS_FACTORY_COMPONENT: ComponentContainer = + ComponentContainer::new("extrinsics_factory"); + +/// Sidechain component instances +///------------------------------------------------------------------------------------------------- + +/// Enclave RPC WS handler. +pub static GLOBAL_RPC_WS_HANDLER_COMPONENT: ComponentContainer = + ComponentContainer::new("rpc_ws_handler"); /// Sidechain import queue. pub static GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT: ComponentContainer< EnclaveSidechainBlockImportQueue, -> = ComponentContainer::new(); +> = ComponentContainer::new("sidechain_import_queue"); /// Sidechain import queue worker - processes the import queue. pub static GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT: ComponentContainer< EnclaveSidechainBlockImportQueueWorker, -> = ComponentContainer::new(); +> = ComponentContainer::new("sidechain_import_queue_worker"); + +/// Sidechain block composer. 
+pub static GLOBAL_SIDECHAIN_BLOCK_COMPOSER_COMPONENT: ComponentContainer< + EnclaveSidechainBlockComposer, +> = ComponentContainer::new("sidechain_block_composer"); /// Sidechain block syncer. pub static GLOBAL_SIDECHAIN_BLOCK_SYNCER_COMPONENT: ComponentContainer< EnclaveSidechainBlockSyncer, -> = ComponentContainer::new(); - -pub static GLOBAL_RPC_AUTHOR_COMPONENT: ComponentContainer = - ComponentContainer::new(); +> = ComponentContainer::new("sidechain_block_syncer"); +/// Sidechain top pool operation handler. pub static GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT: ComponentContainer< EnclaveTopPoolOperationHandler, -> = ComponentContainer::new(); +> = ComponentContainer::new("top_pool_operation_handler"); diff --git a/enclave-runtime/src/initialization.rs b/enclave-runtime/src/initialization.rs new file mode 100644 index 0000000000..3d5bdf1eb0 --- /dev/null +++ b/enclave-runtime/src/initialization.rs @@ -0,0 +1,308 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::{ + error::{Error, Result as EnclaveResult}, + global_components::{ + EnclaveOCallApi, EnclaveRpcConnectionRegistry, EnclaveRpcResponder, EnclaveSidechainApi, + EnclaveSidechainBlockImportQueue, EnclaveSidechainBlockImportQueueWorker, + EnclaveSidechainBlockImporter, EnclaveSidechainBlockSyncer, EnclaveStateFileIo, + EnclaveStateHandler, EnclaveStateKeyRepository, EnclaveStfExecutor, EnclaveTopPool, + EnclaveTopPoolAuthor, EnclaveTopPoolOperationHandler, EnclaveValidatorAccessor, + GLOBAL_EXTRINSICS_FACTORY_COMPONENT, GLOBAL_OCALL_API_COMPONENT, + GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT, GLOBAL_RPC_WS_HANDLER_COMPONENT, + GLOBAL_SIDECHAIN_BLOCK_COMPOSER_COMPONENT, GLOBAL_SIDECHAIN_BLOCK_SYNCER_COMPONENT, + GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT, GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT, + GLOBAL_STATE_HANDLER_COMPONENT, GLOBAL_STATE_KEY_REPOSITORY_COMPONENT, + GLOBAL_STF_EXECUTOR_COMPONENT, GLOBAL_TOP_POOL_AUTHOR_COMPONENT, + GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT, GLOBAL_WEB_SOCKET_SERVER_COMPONENT, + }, + ocall::OcallApi, + rpc::{rpc_response_channel::RpcResponseChannel, worker_api_direct::public_api_rpc_handler}, + Hash, +}; +use base58::ToBase58; +use codec::Encode; +use ita_stf::State as StfState; +use itc_direct_rpc_server::{ + create_determine_watch, rpc_connection_registry::ConnectionRegistry, + rpc_ws_handler::RpcWsHandler, +}; +use itc_parentchain::{ + block_import_dispatcher::triggered_dispatcher::TriggeredDispatcher, + block_importer::ParentchainBlockImporter, + indirect_calls_executor::IndirectCallsExecutor, + light_client::{concurrent_access::ValidatorAccess, LightClientState}, +}; +use itc_tls_websocket_server::{create_ws_server, ConnectionToken, WebSocketServer}; +use itp_block_import_queue::BlockImportQueue; +use itp_component_container::{ComponentGetter, ComponentInitializer}; +use itp_extrinsics_factory::ExtrinsicsFactory; +use itp_nonce_cache::GLOBAL_NONCE_CACHE; +use itp_primitives_cache::GLOBAL_PRIMITIVES_CACHE; +use 
itp_settings::files::STATE_SNAPSHOTS_CACHE_SIZE; +use itp_sgx_crypto::{aes, ed25519, rsa3072, AesSeal, Ed25519Seal, Rsa3072Seal}; +use itp_sgx_io::StaticSealedIO; +use itp_stf_state_handler::{ + handle_state::HandleState, query_shard_state::QueryShardState, + state_snapshot_repository_loader::StateSnapshotRepositoryLoader, StateHandler, +}; +use itp_storage::StorageProof; +use itp_top_pool::pool::Options as PoolOptions; +use itp_top_pool_author::author::AuthorTopFilter; +use itp_types::{Block, Header, ShardIdentifier, SignedBlock}; +use its_sidechain::{ + aura::block_importer::BlockImporter, block_composer::BlockComposer, + top_pool_executor::TopPoolOperationHandler, +}; +use log::*; +use primitive_types::H256; +use sgx_crypto_helper::rsa3072::Rsa3072KeyPair; +use sp_core::crypto::Pair; +use sp_finality_grandpa::VersionedAuthorityList; +use std::{string::String, sync::Arc}; + +pub(crate) fn init_enclave(mu_ra_url: String, untrusted_worker_url: String) -> EnclaveResult<()> { + // Initialize the logging environment in the enclave. + env_logger::init(); + + ed25519::create_sealed_if_absent().map_err(Error::Crypto)?; + let signer = Ed25519Seal::unseal_from_static_file().map_err(Error::Crypto)?; + info!("[Enclave initialized] Ed25519 prim raw : {:?}", signer.public().0); + + rsa3072::create_sealed_if_absent()?; + + // Create the aes key that is used for state encryption such that a key is always present in tests. + // It will be overwritten anyway if mutual remote attestation is performed with the primary worker. + aes::create_sealed_if_absent().map_err(Error::Crypto)?; + + let state_key = AesSeal::unseal_from_static_file()?; + let state_key_repository = + Arc::new(EnclaveStateKeyRepository::new(state_key, Arc::new(AesSeal))); + GLOBAL_STATE_KEY_REPOSITORY_COMPONENT.initialize(state_key_repository.clone()); + + let state_file_io = Arc::new(EnclaveStateFileIo::new(state_key_repository)); + let state_snapshot_repository_loader = + StateSnapshotRepositoryLoader::::new(state_file_io); + let state_snapshot_repository = + state_snapshot_repository_loader.load_snapshot_repository(STATE_SNAPSHOTS_CACHE_SIZE)?; + + let state_handler = Arc::new(StateHandler::new(state_snapshot_repository)); + GLOBAL_STATE_HANDLER_COMPONENT.initialize(state_handler.clone()); + + let ocall_api = Arc::new(OcallApi); + GLOBAL_OCALL_API_COMPONENT.initialize(ocall_api.clone()); + + let stf_executor = Arc::new(EnclaveStfExecutor::new(ocall_api.clone(), state_handler.clone())); + GLOBAL_STF_EXECUTOR_COMPONENT.initialize(stf_executor); + + // For debug purposes, list shards. no problem to panic if fails. + let shards = state_handler.list_shards().unwrap(); + debug!("found the following {} shards on disk:", shards.len()); + for s in shards { + debug!("{}", s.encode().to_base58()) + } + + itp_primitives_cache::set_primitives( + GLOBAL_PRIMITIVES_CACHE.as_ref(), + mu_ra_url, + untrusted_worker_url, + ) + .map_err(Error::PrimitivesAccess)?; + + let shielding_key = Rsa3072Seal::unseal_from_static_file()?; + let watch_extractor = Arc::new(create_determine_watch::()); + + let connection_registry = Arc::new(ConnectionRegistry::::new()); + + // We initialize components for the public RPC / direct invocation server here, so we can start the server + // before registering on the parentchain. 
If we started the RPC AFTER registering on the parentchain and + // initializing the light-client, there is a period of time where a peer might want to reach us, + // but the RPC server is not yet up and running, resulting in error messages or even in that + // validateer completely breaking (IO PipeError). + // Corresponding GH issues are #545 and #600. + + let top_pool_author = create_top_pool_author( + connection_registry.clone(), + state_handler, + ocall_api, + shielding_key, + ); + GLOBAL_TOP_POOL_AUTHOR_COMPONENT.initialize(top_pool_author.clone()); + + let io_handler = public_api_rpc_handler(top_pool_author); + let rpc_handler = Arc::new(RpcWsHandler::new(io_handler, watch_extractor, connection_registry)); + GLOBAL_RPC_WS_HANDLER_COMPONENT.initialize(rpc_handler); + + let sidechain_block_import_queue = Arc::new(EnclaveSidechainBlockImportQueue::default()); + GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT.initialize(sidechain_block_import_queue); + + Ok(()) +} + +pub(crate) fn init_enclave_sidechain_components() -> EnclaveResult<()> { + let stf_executor = GLOBAL_STF_EXECUTOR_COMPONENT.get()?; + let state_handler = GLOBAL_STATE_HANDLER_COMPONENT.get()?; + + let ocall_api = GLOBAL_OCALL_API_COMPONENT.get()?; + let top_pool_author = GLOBAL_TOP_POOL_AUTHOR_COMPONENT.get()?; + + let top_pool_operation_handler = Arc::new(EnclaveTopPoolOperationHandler::new( + top_pool_author.clone(), + stf_executor.clone(), + )); + GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT.initialize(top_pool_operation_handler); + + let top_pool_executor = Arc::::new( + TopPoolOperationHandler::new(top_pool_author, stf_executor), + ); + + let parentchain_block_import_dispatcher = + GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT.get()?; + + let signer = Ed25519Seal::unseal_from_static_file()?; + let state_key = AesSeal::unseal_from_static_file()?; + + let validator_access = Arc::new(EnclaveValidatorAccessor::default()); + let genesis_hash = validator_access.execute_on_validator(|v| v.genesis_hash(v.num_relays()))?; + + let extrinsics_factory = + Arc::new(ExtrinsicsFactory::new(genesis_hash, signer.clone(), GLOBAL_NONCE_CACHE.clone())); + + let sidechain_block_importer = Arc::::new(BlockImporter::new( + state_handler, + state_key, + signer.clone(), + top_pool_executor, + parentchain_block_import_dispatcher, + ocall_api.clone(), + extrinsics_factory, + validator_access, + )); + + let sidechain_block_import_queue = GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT.get()?; + + let sidechain_block_syncer = + Arc::new(EnclaveSidechainBlockSyncer::new(sidechain_block_importer, ocall_api)); + GLOBAL_SIDECHAIN_BLOCK_SYNCER_COMPONENT.initialize(sidechain_block_syncer.clone()); + + let sidechain_block_import_queue_worker = + Arc::new(EnclaveSidechainBlockImportQueueWorker::new( + sidechain_block_import_queue, + sidechain_block_syncer, + )); + GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT.initialize(sidechain_block_import_queue_worker); + + let block_composer = Arc::new(BlockComposer::new(signer, state_key)); + GLOBAL_SIDECHAIN_BLOCK_COMPOSER_COMPONENT.initialize(block_composer); + + Ok(()) +} + +pub(crate) fn init_light_client( + genesis_header: Header, + authorities: VersionedAuthorityList, + storage_proof: StorageProof, +) -> EnclaveResult
{ + let latest_header = itc_parentchain::light_client::io::read_or_init_validator::( + genesis_header, + authorities, + storage_proof, + )?; + + // Initialize the global parentchain block import dispatcher instance. + let signer = Ed25519Seal::unseal_from_static_file()?; + let shielding_key = Rsa3072Seal::unseal_from_static_file()?; + let state_handler = GLOBAL_STATE_HANDLER_COMPONENT.get()?; + + let stf_executor = GLOBAL_STF_EXECUTOR_COMPONENT.get()?; + let ocall_api = GLOBAL_OCALL_API_COMPONENT.get()?; + + let validator_access = Arc::new(EnclaveValidatorAccessor::default()); + let genesis_hash = validator_access.execute_on_validator(|v| v.genesis_hash(v.num_relays()))?; + + let extrinsics_factory = + Arc::new(ExtrinsicsFactory::new(genesis_hash, signer, GLOBAL_NONCE_CACHE.clone())); + + GLOBAL_EXTRINSICS_FACTORY_COMPONENT.initialize(extrinsics_factory.clone()); + + let indirect_calls_executor = + Arc::new(IndirectCallsExecutor::new(shielding_key, stf_executor.clone())); + let parentchain_block_importer = ParentchainBlockImporter::new( + validator_access, + ocall_api, + stf_executor, + extrinsics_factory, + indirect_calls_executor, + state_handler, + ); + let parentchain_block_import_queue = BlockImportQueue::::default(); + let parentchain_block_import_dispatcher = Arc::new(TriggeredDispatcher::new( + parentchain_block_importer, + parentchain_block_import_queue, + )); + + GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT.initialize(parentchain_block_import_dispatcher); + + Ok(latest_header) +} + +pub(crate) fn init_direct_invocation_server(server_addr: String) -> EnclaveResult<()> { + let rpc_handler = GLOBAL_RPC_WS_HANDLER_COMPONENT.get()?; + + let web_socket_server = create_ws_server(server_addr.as_str(), rpc_handler); + + GLOBAL_WEB_SOCKET_SERVER_COMPONENT.initialize(web_socket_server.clone()); + + match web_socket_server.run() { + Ok(()) => {}, + Err(e) => { + error!("Web socket server encountered an unexpected error: {:?}", e) + }, + } + + Ok(()) +} + +pub(crate) fn init_shard(shard: ShardIdentifier) -> EnclaveResult<()> { + let state_handler = GLOBAL_STATE_HANDLER_COMPONENT.get()?; + let _ = state_handler.initialize_shard(shard)?; + Ok(()) +} + +/// Initialize the TOP pool author component. 
+pub fn create_top_pool_author( + connection_registry: Arc, + state_handler: Arc, + ocall_api: Arc, + shielding_crypto: Rsa3072KeyPair, +) -> Arc { + let response_channel = Arc::new(RpcResponseChannel::default()); + let rpc_responder = Arc::new(EnclaveRpcResponder::new(connection_registry, response_channel)); + + let side_chain_api = Arc::new(EnclaveSidechainApi::new()); + let top_pool = + Arc::new(EnclaveTopPool::create(PoolOptions::default(), side_chain_api, rpc_responder)); + + Arc::new(EnclaveTopPoolAuthor::new( + top_pool, + AuthorTopFilter {}, + state_handler, + shielding_crypto, + ocall_api, + )) +} diff --git a/enclave-runtime/src/lib.rs b/enclave-runtime/src/lib.rs index 52733356ae..9ab43e3844 100644 --- a/enclave-runtime/src/lib.rs +++ b/enclave-runtime/src/lib.rs @@ -35,65 +35,41 @@ use sgx_types::size_t; use crate::{ error::{Error, Result}, global_components::{ - EnclaveSidechainBlockImportQueue, EnclaveSidechainBlockImportQueueWorker, - EnclaveSidechainBlockImporter, EnclaveSidechainBlockSyncer, EnclaveStfExecutor, - EnclaveTopPoolOperationHandler, EnclaveValidatorAccessor, - GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT, GLOBAL_RPC_AUTHOR_COMPONENT, - GLOBAL_SIDECHAIN_BLOCK_SYNCER_COMPONENT, GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT, - GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT, - GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT, + GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT, GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT, + GLOBAL_STATE_HANDLER_COMPONENT, }, ocall::OcallApi, - rpc::worker_api_direct::{public_api_rpc_handler, sidechain_io_handler}, + rpc::worker_api_direct::sidechain_io_handler, utils::{hash_from_slice, utf8_str_from_raw, write_slice_and_whitespace_pad, DecodeRaw}, }; -use base58::ToBase58; use codec::{alloc::string::String, Decode, Encode}; use ita_stf::{Getter, ShardIdentifier, Stf}; -use itc_direct_rpc_server::{ - create_determine_watch, rpc_connection_registry::ConnectionRegistry, - rpc_ws_handler::RpcWsHandler, +use itc_parentchain::block_import_dispatcher::{ + triggered_dispatcher::TriggerParentchainBlockImport, DispatchBlockImport, }; -use itc_parentchain::{ - block_import_dispatcher::{ - triggered_dispatcher::{TriggerParentchainBlockImport, TriggeredDispatcher}, - DispatchBlockImport, - }, - block_importer::ParentchainBlockImporter, - indirect_calls_executor::IndirectCallsExecutor, - light_client::{concurrent_access::ValidatorAccess, LightClientState}, -}; -use itc_tls_websocket_server::{connection::TungsteniteWsConnection, run_ws_server}; -use itp_block_import_queue::{BlockImportQueue, PushToBlockQueue}; -use itp_component_container::{ComponentGetter, ComponentInitializer}; -use itp_extrinsics_factory::ExtrinsicsFactory; +use itp_block_import_queue::PushToBlockQueue; +use itp_component_container::ComponentGetter; use itp_nonce_cache::{MutateNonce, Nonce, GLOBAL_NONCE_CACHE}; use itp_ocall_api::EnclaveAttestationOCallApi; -use itp_primitives_cache::GLOBAL_PRIMITIVES_CACHE; use itp_settings::node::{ REGISTER_ENCLAVE, RUNTIME_SPEC_VERSION, RUNTIME_TRANSACTION_VERSION, TEEREX_MODULE, }; -use itp_sgx_crypto::{aes, ed25519, rsa3072, AesSeal, Ed25519Seal, Rsa3072Seal}; +use itp_sgx_crypto::{ed25519, Ed25519Seal, Rsa3072Seal}; use itp_sgx_io as io; -use itp_sgx_io::SealedIO; -use itp_stf_executor::executor::StfExecutor; -use itp_stf_state_handler::{ - handle_state::HandleState, query_shard_state::QueryShardState, GlobalFileStateHandler, -}; +use itp_sgx_io::StaticSealedIO; +use itp_stf_state_handler::handle_state::HandleState; use itp_storage::StorageProof; -use 
itp_types::{Block, Header, SignedBlock}; -use its_sidechain::{ - aura::block_importer::BlockImporter, top_pool_executor::TopPoolOperationHandler, -}; +use itp_types::{Header, SignedBlock}; use log::*; use sgx_types::sgx_status_t; use sp_core::crypto::Pair; use sp_finality_grandpa::VersionedAuthorityList; -use std::{slice, sync::Arc, vec::Vec}; +use std::{slice, vec::Vec}; use substrate_api_client::compose_extrinsic_offline; mod attestation; mod global_components; +mod initialization; mod ipfs; mod ocall; mod utils; @@ -130,37 +106,6 @@ pub unsafe extern "C" fn init( untrusted_worker_addr: *const u8, untrusted_worker_addr_size: u32, ) -> sgx_status_t { - // Initialize the logging environment in the enclave. - env_logger::init(); - - if let Err(e) = ed25519::create_sealed_if_absent().map_err(Error::Crypto) { - return e.into() - } - let signer = match Ed25519Seal::unseal().map_err(Error::Crypto) { - Ok(pair) => pair, - Err(e) => return e.into(), - }; - info!("[Enclave initialized] Ed25519 prim raw : {:?}", signer.public().0); - - if let Err(e) = rsa3072::create_sealed_if_absent() { - return e.into() - } - - // Create the aes key that is used for state encryption such that a key is always present in tests. - // It will be overwritten anyway if mutual remote attastation is performed with the primary worker. - if let Err(e) = aes::create_sealed_if_absent().map_err(Error::Crypto) { - return e.into() - } - - let state_handler = GlobalFileStateHandler; - - // For debug purposes, list shards. no problem to panic if fails. - let shards = state_handler.list_shards().unwrap(); - debug!("found the following {} shards on disk:", shards.len()); - for s in shards { - debug!("{}", s.encode().to_base58()) - } - let mu_ra_url = match String::decode(&mut slice::from_raw_parts(mu_ra_addr, mu_ra_addr_size as usize)) .map_err(Error::Codec) @@ -179,17 +124,10 @@ pub unsafe extern "C" fn init( Err(e) => return e.into(), }; - if let Err(e) = itp_primitives_cache::set_primitives( - GLOBAL_PRIMITIVES_CACHE.as_ref(), - &mu_ra_url, - &untrusted_worker_url, - ) - .map_err(Error::PrimitivesAccess) - { - return e.into() + match initialization::init_enclave(mu_ra_url, untrusted_worker_url) { + Err(e) => e.into(), + Ok(()) => sgx_status_t::SGX_SUCCESS, } - - sgx_status_t::SGX_SUCCESS } #[no_mangle] @@ -222,7 +160,7 @@ pub unsafe extern "C" fn get_ecc_signing_pubkey(pubkey: *mut u8, pubkey_size: u3 return e.into() } - let signer = match Ed25519Seal::unseal().map_err(Error::Crypto) { + let signer = match Ed25519Seal::unseal_from_static_file().map_err(Error::Crypto) { Ok(pair) => pair, Err(e) => return e.into(), }; @@ -273,7 +211,7 @@ pub unsafe extern "C" fn mock_register_enclave_xt( .get_mrenclave_of_self() .map_or_else(|_| Vec::::new(), |m| m.m.encode()); - let signer = Ed25519Seal::unseal().unwrap(); + let signer = Ed25519Seal::unseal_from_static_file().unwrap(); let call = ([TEEREX_MODULE, REGISTER_ENCLAVE], mre, url); let nonce_cache = GLOBAL_NONCE_CACHE.clone(); @@ -299,8 +237,8 @@ pub unsafe extern "C" fn mock_register_enclave_xt( sgx_status_t::SGX_SUCCESS } -/// this is reduced to the side chain block import RPC interface (i.e. worker-worker communication) -/// the entire rest of the RPC server is run inside the enclave and does not use this e-call function anymore +/// This is reduced to the sidechain block import RPC interface (i.e. worker-worker communication). +/// The entire rest of the RPC server is run inside the enclave and does not use this e-call function anymore. 
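// The one RPC served behind this e-call is the sidechain block import used
// for worker-to-worker communication: a peer's signed sidechain block is
// pushed into an import queue and processed later by the sidechain import
// queue worker. Simplified sketch of that hand-off with placeholder types
// (not the real `itp-block-import-queue` API):
use std::sync::Mutex;

struct SignedSidechainBlockSketch; // stands in for the real signed block type

#[derive(Default)]
struct SidechainImportQueueSketch {
    queue: Mutex<Vec<SignedSidechainBlockSketch>>,
}

impl SidechainImportQueueSketch {
    /// Called from the RPC handler when a peer submits a block.
    fn push_single(&self, block: SignedSidechainBlockSketch) {
        self.queue.lock().unwrap().push(block);
    }

    /// Called by the import queue worker when it processes pending blocks.
    fn pop_all(&self) -> Vec<SignedSidechainBlockSketch> {
        std::mem::take(&mut *self.queue.lock().unwrap())
    }
}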
#[no_mangle] pub unsafe extern "C" fn call_rpc_methods( request: *const u8, @@ -331,9 +269,7 @@ pub unsafe extern "C" fn call_rpc_methods( } fn sidechain_rpc_int(request: &str) -> Result { - let sidechain_block_import_queue = GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT - .get() - .ok_or(Error::ComponentNotInitialized)?; + let sidechain_block_import_queue = GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT.get()?; let io = sidechain_io_handler(move |signed_block| { sidechain_block_import_queue.push_single(signed_block) @@ -357,7 +293,13 @@ pub unsafe extern "C" fn get_state( let shard = ShardIdentifier::from_slice(slice::from_raw_parts(shard, shard_size as usize)); let mut trusted_op_slice = slice::from_raw_parts(trusted_op, trusted_op_size as usize); let value_slice = slice::from_raw_parts_mut(value, value_size as usize); - let getter = Getter::decode(&mut trusted_op_slice).unwrap(); + let getter = match Getter::decode(&mut trusted_op_slice).map_err(Error::Codec) { + Err(e) => { + error!("Failed to decode getter: {:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + }, + Ok(g) => g, + }; if let Getter::trusted(trusted_getter_signed) = getter.clone() { debug!("verifying signature of TrustedGetterSigned"); @@ -367,9 +309,15 @@ pub unsafe extern "C" fn get_state( } } - let state_handler = GlobalFileStateHandler; + let state_handler = match GLOBAL_STATE_HANDLER_COMPONENT.get() { + Ok(a) => a, + Err(e) => { + error!("Failed to retrieve global state handler component: {:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + }, + }; - let mut state = match state_handler.load_initialized(&shard) { + let mut state = match state_handler.load(&shard) { Ok(s) => s, Err(e) => return Error::StfStateHandler(e).into(), }; @@ -383,6 +331,21 @@ pub unsafe extern "C" fn get_state( sgx_status_t::SGX_SUCCESS } +/// Initialize sidechain enclave components. +/// +/// Call this once at startup. Has to be called AFTER the light-client +/// (parentchain components) have been initialized (because we need the parentchain +/// block import dispatcher). +#[no_mangle] +pub unsafe extern "C" fn init_enclave_sidechain_components() -> sgx_status_t { + if let Err(e) = initialization::init_enclave_sidechain_components() { + error!("Failed to initialize sidechain components: {:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + } + + sgx_status_t::SGX_SUCCESS +} + /// Call this once at worker startup to initialize the TOP pool and direct invocation RPC server. /// /// This function will run the RPC server on the same thread as it is called and will loop there. 
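// The two doc comments above fix an ordering on the untrusted worker side:
// the sidechain components can only be initialized after the light client
// (they need the parentchain block import dispatcher), and the direct
// invocation server loops on the thread it is called from. One plausible
// call sequence, with the e-calls stubbed out (the real worker goes through
// its enclave-api wrappers, and the address is illustrative):
fn ecall_init() { /* `init` e-call: keys, state handler, caches */ }
fn ecall_init_light_client() { /* `init_light_client` e-call */ }
fn ecall_init_enclave_sidechain_components() { /* must run after the light client */ }
fn ecall_init_direct_invocation_server(addr: &str) {
    // Runs the web-socket RPC server and loops on the calling thread.
    let _ = addr;
}

fn start_worker_sketch() {
    ecall_init();

    // The RPC server blocks its thread, so give it a dedicated one.
    let rpc_thread =
        std::thread::spawn(|| ecall_init_direct_invocation_server("0.0.0.0:2000"));

    ecall_init_light_client();
    ecall_init_enclave_sidechain_components();

    // ... register on the parentchain, sync parentchain blocks, etc.
    rpc_thread.join().unwrap();
}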
@@ -403,39 +366,10 @@ pub unsafe extern "C" fn init_direct_invocation_server( }, }; - let watch_extractor = Arc::new(create_determine_watch::()); - let connection_registry = Arc::new(ConnectionRegistry::::new()); - - let rsa_shielding_key = match Rsa3072Seal::unseal() { - Ok(k) => k, - Err(e) => { - error!("Failed to unseal shielding key: {:?}", e); - return sgx_status_t::SGX_ERROR_UNEXPECTED - }, - }; - - let state_handler = Arc::new(GlobalFileStateHandler); - let ocall_api = Arc::new(OcallApi); - - let rpc_author = its_sidechain::top_pool_rpc_author::initializer::create_top_pool_rpc_author( - connection_registry.clone(), - state_handler.clone(), - ocall_api.clone(), - rsa_shielding_key, - ); - - GLOBAL_RPC_AUTHOR_COMPONENT.initialize(rpc_author.clone()); - - let stf_executor = Arc::new(EnclaveStfExecutor::new(ocall_api, state_handler)); - let top_pool_operation_handler = - Arc::new(EnclaveTopPoolOperationHandler::new(rpc_author.clone(), stf_executor)); - - GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT.initialize(top_pool_operation_handler); - - let io_handler = public_api_rpc_handler(rpc_author); - let rpc_handler = Arc::new(RpcWsHandler::new(io_handler, watch_extractor, connection_registry)); - - run_ws_server(server_addr.as_str(), rpc_handler); + if let Err(e) = initialization::init_direct_invocation_server(server_addr) { + error!("Failed to initialize direct invocation server: {:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + } sgx_status_t::SGX_SUCCESS } @@ -482,103 +416,26 @@ pub unsafe extern "C" fn init_light_client( }, }; - match itc_parentchain::light_client::io::read_or_init_validator::(header, auth, proof) { - Ok(header) => write_slice_and_whitespace_pad(latest_header_slice, header.encode()), - Err(e) => return e.into(), - } - - // Initialize the global parentchain block import dispatcher instance. 
- let signer = match Ed25519Seal::unseal() { - Ok(s) => s, - Err(e) => { - error!("Error retrieving signer key pair: {:?}", e); - return sgx_status_t::SGX_ERROR_UNEXPECTED - }, - }; - let shielding_key = match Rsa3072Seal::unseal() { - Ok(s) => s, + match initialization::init_light_client(header, auth, proof) { + Ok(h) => write_slice_and_whitespace_pad(latest_header_slice, h.encode()), Err(e) => { - error!("Error retrieving shielding key: {:?}", e); + error!("Failed to initialize light-client: {:?}", e); return sgx_status_t::SGX_ERROR_UNEXPECTED }, - }; - let state_key = match AesSeal::unseal() { - Ok(k) => k, - Err(e) => { - error!("Failed to unseal state key: {:?}", e); - return sgx_status_t::SGX_ERROR_UNEXPECTED - }, - }; - let rpc_author = match GLOBAL_RPC_AUTHOR_COMPONENT.get() { - Some(a) => a, - None => { - error!("Failed to retrieve global top pool author"); - return sgx_status_t::SGX_ERROR_UNEXPECTED - }, - }; - - let validator_access = Arc::new(EnclaveValidatorAccessor::default()); - let genesis_hash = - match validator_access.execute_on_validator(|v| v.genesis_hash(v.num_relays())) { - Ok(g) => g, - Err(e) => { - error!("Error retrieving genesis hash: {:?}", e); - return sgx_status_t::SGX_ERROR_UNEXPECTED - }, - }; - - let file_state_handler = Arc::new(GlobalFileStateHandler); - let ocall_api = Arc::new(OcallApi); - let stf_executor = Arc::new(StfExecutor::new(ocall_api.clone(), file_state_handler.clone())); - let extrinsics_factory = - Arc::new(ExtrinsicsFactory::new(genesis_hash, signer.clone(), GLOBAL_NONCE_CACHE.clone())); - let indirect_calls_executor = - Arc::new(IndirectCallsExecutor::new(shielding_key, stf_executor.clone())); - let parentchain_block_importer = ParentchainBlockImporter::new( - validator_access.clone(), - ocall_api.clone(), - stf_executor.clone(), - extrinsics_factory.clone(), - indirect_calls_executor, - file_state_handler.clone(), - ); - let parentchain_block_import_queue = BlockImportQueue::::default(); - let parentchain_block_import_dispatcher = Arc::new(TriggeredDispatcher::new( - parentchain_block_importer, - parentchain_block_import_queue, - )); - - GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT - .initialize(parentchain_block_import_dispatcher.clone()); - - let top_pool_executor = Arc::::new( - TopPoolOperationHandler::new(rpc_author, stf_executor), - ); - let sidechain_block_importer = Arc::::new(BlockImporter::new( - file_state_handler, - state_key, - signer, - top_pool_executor, - parentchain_block_import_dispatcher, - ocall_api.clone(), - extrinsics_factory, - validator_access, - )); - - let sidechain_block_syncer = - Arc::new(EnclaveSidechainBlockSyncer::new(sidechain_block_importer, ocall_api)); + } - GLOBAL_SIDECHAIN_BLOCK_SYNCER_COMPONENT.initialize(sidechain_block_syncer.clone()); + sgx_status_t::SGX_SUCCESS +} - let sidechain_block_import_queue = Arc::new(EnclaveSidechainBlockImportQueue::default()); - GLOBAL_SIDECHAIN_IMPORT_QUEUE_COMPONENT.initialize(sidechain_block_import_queue.clone()); +#[no_mangle] +pub unsafe extern "C" fn init_shard(shard: *const u8, shard_size: u32) -> sgx_status_t { + let shard_identifier = + ShardIdentifier::from_slice(slice::from_raw_parts(shard, shard_size as usize)); - let sidechain_block_import_queue_worker = - Arc::new(EnclaveSidechainBlockImportQueueWorker::new( - sidechain_block_import_queue, - sidechain_block_syncer, - )); - GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT.initialize(sidechain_block_import_queue_worker); + if let Err(e) = initialization::init_shard(shard_identifier) { + error!("Failed to 
initialize shard ({:?}): {:?}", shard_identifier, e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + } sgx_status_t::SGX_SUCCESS } @@ -609,9 +466,7 @@ pub unsafe extern "C" fn sync_parentchain( /// * sends `confirm_call` xt's of the executed unshielding calls /// * sends `confirm_blocks` xt's for every synced parentchain block fn sync_parentchain_internal(blocks_to_sync: Vec) -> Result<()> { - let block_import_dispatcher = GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT - .get() - .ok_or(Error::ComponentNotInitialized)?; + let block_import_dispatcher = GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT.get()?; block_import_dispatcher.dispatch_import(blocks_to_sync).map_err(|e| e.into()) } @@ -624,13 +479,13 @@ fn sync_parentchain_internal(blocks_to_sync: Vec) -> Result<()> { #[no_mangle] pub unsafe extern "C" fn trigger_parentchain_block_import() -> sgx_status_t { match GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT.get() { - Some(dispatcher) => match dispatcher.import_all() { + Ok(dispatcher) => match dispatcher.import_all() { Ok(_) => sgx_status_t::SGX_SUCCESS, Err(e) => { error!("Failed to trigger import of parentchain blocks: {:?}", e); sgx_status_t::SGX_ERROR_UNEXPECTED }, }, - None => (Error::ComponentNotInitialized).into(), + Err(e) => Error::ComponentContainer(e).into(), } } diff --git a/enclave-runtime/src/ocall/metrics_ocall.rs b/enclave-runtime/src/ocall/metrics_ocall.rs index 30dba8ca9d..0d12dfd7d6 100644 --- a/enclave-runtime/src/ocall/metrics_ocall.rs +++ b/enclave-runtime/src/ocall/metrics_ocall.rs @@ -15,7 +15,7 @@ */ -use crate::{ocall::ffi, OcallApi}; +use crate::ocall::{ffi, OcallApi}; use codec::Encode; use frame_support::ensure; use itp_ocall_api::EnclaveMetricsOCallApi; diff --git a/enclave-runtime/src/ocall/on_chain_ocall.rs b/enclave-runtime/src/ocall/on_chain_ocall.rs index 22ac3fdde9..e2bbd4071e 100644 --- a/enclave-runtime/src/ocall/on_chain_ocall.rs +++ b/enclave-runtime/src/ocall/on_chain_ocall.rs @@ -19,11 +19,12 @@ use crate::ocall::{ffi, OcallApi}; use codec::{Decode, Encode}; use frame_support::ensure; -use itp_ocall_api::EnclaveOnChainOCallApi; -use itp_types::{WorkerRequest, WorkerResponse}; +use itp_ocall_api::{EnclaveOnChainOCallApi, Result}; +use itp_storage::{verify_storage_entries, Error as StorageError, StorageEntryVerified}; +use itp_types::{WorkerRequest, WorkerResponse, H256}; use log::*; use sgx_types::*; -use sp_runtime::OpaqueExtrinsic; +use sp_runtime::{traits::Header, OpaqueExtrinsic}; use std::vec::Vec; impl EnclaveOnChainOCallApi for OcallApi { @@ -74,4 +75,35 @@ impl EnclaveOnChainOCallApi for OcallApi { Ok(decoded_response) } + + fn get_storage_verified, V: Decode>( + &self, + storage_hash: Vec, + header: &H, + ) -> Result> { + // the code below seems like an overkill, but it is surprisingly difficult to + // get an owned value from a `Vec` without cloning. + Ok(self + .get_multiple_storages_verified(vec![storage_hash], header)? + .into_iter() + .next() + .ok_or(StorageError::StorageValueUnavailable)?) 
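// As the comment above notes, `into_iter().next()` is the cheapest way to
// move the single verified entry out of the returned `Vec` without cloning
// it. The same trick in isolation:
fn take_first_owned<T>(values: Vec<T>) -> Option<T> {
    // `into_iter` consumes the Vec, so `next()` yields an owned `T`, not a reference.
    values.into_iter().next()
}

fn take_first_owned_demo() {
    let first = take_first_owned(vec![String::from("verified entry")]);
    assert_eq!(first.as_deref(), Some("verified entry"));
}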
+ } + + fn get_multiple_storages_verified, V: Decode>( + &self, + storage_hashes: Vec>, + header: &H, + ) -> Result>> { + let requests = storage_hashes + .into_iter() + .map(|key| WorkerRequest::ChainStorage(key, Some(header.hash()))) + .collect(); + + let storage_entries = self + .worker_request::>(requests) + .map(|storages| verify_storage_entries(storages, header))??; + + Ok(storage_entries) + } } diff --git a/enclave-runtime/src/rpc/mod.rs b/enclave-runtime/src/rpc/mod.rs index b1040b4e3e..5b359ab270 100644 --- a/enclave-runtime/src/rpc/mod.rs +++ b/enclave-runtime/src/rpc/mod.rs @@ -15,4 +15,5 @@ */ +pub mod rpc_response_channel; pub mod worker_api_direct; diff --git a/enclave-runtime/src/rpc/rpc_response_channel.rs b/enclave-runtime/src/rpc/rpc_response_channel.rs new file mode 100644 index 0000000000..71f03a0f67 --- /dev/null +++ b/enclave-runtime/src/rpc/rpc_response_channel.rs @@ -0,0 +1,40 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::global_components::GLOBAL_WEB_SOCKET_SERVER_COMPONENT; +use itc_direct_rpc_server::{response_channel::ResponseChannel, DirectRpcError}; +use itc_tls_websocket_server::{ConnectionToken, WebSocketResponder}; +use itp_component_container::ComponentGetter; +use std::string::String; + +/// RPC response channel. +/// +/// Uses the web-socket server to send an RPC response/update. +/// In case no server is available or running, the response will be discarded. 
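// The response channel decouples the producers of RPC updates (e.g. the TOP
// pool reporting the status of a submitted operation) from the transport:
// callers only hand over a connection token and a message string, and the
// channel forwards that pair to the web-socket server. A minimal stand-in
// that just records the updates, with placeholder types rather than the real
// `itc-direct-rpc-server` trait:
use std::sync::Mutex;

type ConnectionTokenSketch = usize;

trait ResponseChannelSketch {
    fn respond(&self, token: ConnectionTokenSketch, message: String);
}

#[derive(Default)]
struct CollectingResponseChannel {
    sent: Mutex<Vec<(ConnectionTokenSketch, String)>>,
}

impl ResponseChannelSketch for CollectingResponseChannel {
    fn respond(&self, token: ConnectionTokenSketch, message: String) {
        // Instead of writing to a web-socket connection, just record the update.
        self.sent.lock().unwrap().push((token, message));
    }
}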
+#[derive(Default)] +pub struct RpcResponseChannel; + +impl ResponseChannel for RpcResponseChannel { + type Error = DirectRpcError; + + fn respond(&self, token: ConnectionToken, message: String) -> Result<(), Self::Error> { + let web_socket_server = GLOBAL_WEB_SOCKET_SERVER_COMPONENT + .get() + .map_err(|e| DirectRpcError::Other(e.into()))?; + web_socket_server.send_message(token, message).map_err(|e| e.into()) + } +} diff --git a/enclave-runtime/src/rpc/worker_api_direct.rs b/enclave-runtime/src/rpc/worker_api_direct.rs index 6f46439fc4..126a22aa60 100644 --- a/enclave-runtime/src/rpc/worker_api_direct.rs +++ b/enclave-runtime/src/rpc/worker_api_direct.rs @@ -19,11 +19,11 @@ use codec::Encode; use core::result::Result; use itp_primitives_cache::{GetPrimitives, GLOBAL_PRIMITIVES_CACHE}; use itp_sgx_crypto::Rsa3072Seal; +use itp_top_pool_author::traits::AuthorApi; use itp_types::{DirectRequestStatus, RpcReturnValue, H256}; use its_sidechain::{ primitives::types::SignedBlock, rpc_handler::{direct_top_pool_api, import_block_api}, - top_pool_rpc_author::traits::AuthorApi, }; use jsonrpc_core::{serde_json::json, IoHandler, Params, Value}; use sgx_runtime::Runtime; @@ -43,14 +43,14 @@ fn get_all_rpc_methods_string(io_handler: &IoHandler) -> String { format!("methods: [{}]", method_string) } -pub fn public_api_rpc_handler(rpc_author: Arc) -> IoHandler +pub fn public_api_rpc_handler(top_pool_author: Arc) -> IoHandler where R: AuthorApi + Send + Sync + 'static, { let io = IoHandler::new(); // Add direct TOP pool rpc methods - let mut io = direct_top_pool_api::add_top_pool_direct_rpc_methods(rpc_author, io); + let mut io = direct_top_pool_api::add_top_pool_direct_rpc_methods(top_pool_author, io); // author_getShieldingKey let rsa_pubkey_name: &str = "author_getShieldingKey"; diff --git a/enclave-runtime/src/test/cert_tests.rs b/enclave-runtime/src/test/cert_tests.rs index 45ab3f1dd8..606261363a 100644 --- a/enclave-runtime/src/test/cert_tests.rs +++ b/enclave-runtime/src/test/cert_tests.rs @@ -16,14 +16,13 @@ */ -use hex::FromHexError; -use sgx_types::{sgx_measurement_t, sgx_status_t, SGX_HASH_SIZE}; -use std::vec::Vec; - use crate::{ cert::{verify_attn_report, verify_mra_cert}, test::mocks::attestation_ocall_mock::AttestationOCallMock, }; +use hex::FromHexError; +use sgx_types::{sgx_measurement_t, sgx_status_t, SGX_HASH_SIZE}; +use std::vec::Vec; // Test data and tests are mostly copied from: // https://github.com/integritee-network/pallet-teerex/blob/master/ias-verify/ diff --git a/enclave-runtime/src/test/fixtures/initialize_test_state.rs b/enclave-runtime/src/test/fixtures/initialize_test_state.rs index 87f0907734..456caba0d9 100644 --- a/enclave-runtime/src/test/fixtures/initialize_test_state.rs +++ b/enclave-runtime/src/test/fixtures/initialize_test_state.rs @@ -27,12 +27,13 @@ pub fn init_state>( ) -> (State, ShardIdentifier) { let shard = ShardIdentifier::default(); + let _hash = state_handler.initialize_shard(shard).unwrap(); let (lock, _) = state_handler.load_for_mutation(&shard).unwrap(); let mut state = Stf::init_state(); state.prune_state_diff(); - state_handler.write(state.clone(), lock, &shard).unwrap(); + state_handler.write_after_mutation(state.clone(), lock, &shard).unwrap(); (state, shard) } diff --git a/enclave-runtime/src/test/mocks/propose_to_import_call_mock.rs b/enclave-runtime/src/test/mocks/propose_to_import_call_mock.rs index d1ee8aa293..3bee13cfa4 100644 --- a/enclave-runtime/src/test/mocks/propose_to_import_call_mock.rs +++ 
b/enclave-runtime/src/test/mocks/propose_to_import_call_mock.rs @@ -18,15 +18,16 @@ use crate::test::mocks::types::TestBlockImporter; use codec::{Decode, Encode}; -use itp_ocall_api::{EnclaveOnChainOCallApi, EnclaveSidechainOCallApi}; +use itp_ocall_api::{EnclaveOnChainOCallApi, EnclaveSidechainOCallApi, Result}; +use itp_storage::StorageEntryVerified; use itp_types::{ - BlockHash, Header as ParentchainHeader, ShardIdentifier, WorkerRequest, WorkerResponse, + BlockHash, Header as ParentchainHeader, ShardIdentifier, WorkerRequest, WorkerResponse, H256, }; use its_sidechain::{ consensus_common::BlockImport, primitives::types::SignedBlock as SignedSidechainBlockType, }; use sgx_types::SgxResult; -use sp_runtime::OpaqueExtrinsic; +use sp_runtime::{traits::Header as ParentchainHeaderTrait, OpaqueExtrinsic}; use std::{sync::Arc, vec::Vec}; /// OCallApi mock that routes the proposed sidechain blocks directly to the importer, @@ -57,6 +58,22 @@ impl EnclaveOnChainOCallApi for ProposeToImportOCallApi { ) -> SgxResult>> { todo!() } + + fn get_storage_verified, V: Decode>( + &self, + _storage_hash: Vec, + _header: &H, + ) -> Result> { + todo!() + } + + fn get_multiple_storages_verified, V: Decode>( + &self, + _storage_hashes: Vec>, + _header: &H, + ) -> Result>> { + todo!() + } } impl EnclaveSidechainOCallApi for ProposeToImportOCallApi { diff --git a/enclave-runtime/src/test/mocks/types.rs b/enclave-runtime/src/test/mocks/types.rs index 14ebbd8c33..685b04588b 100644 --- a/enclave-runtime/src/test/mocks/types.rs +++ b/enclave-runtime/src/test/mocks/types.rs @@ -26,7 +26,12 @@ use itp_sgx_crypto::Aes; use itp_stf_executor::executor::StfExecutor; use itp_test::mock::{ handle_state_mock::HandleStateMock, metrics_ocall_mock::MetricsOCallMock, - ocall_api_mock::OcallApiMock, + onchain_mock::OnchainMock, +}; +use itp_top_pool::basic_pool::BasicPool; +use itp_top_pool_author::{ + api::SidechainApi, + author::{Author, AuthorTopFilter}, }; use itp_types::{Block as ParentchainBlock, SignedBlock as SignedParentchainBlock}; use its_sidechain::{ @@ -34,12 +39,7 @@ use its_sidechain::{ block_composer::BlockComposer, primitives::types::{Block as SidechainBlock, SignedBlock as SignedSidechainBlock}, state::SidechainDB, - top_pool::basic_pool::BasicPool, top_pool_executor::TopPoolOperationHandler, - top_pool_rpc_author::{ - api::SidechainApi, - author::{Author, AuthorTopFilter}, - }, }; use primitive_types::H256; use sgx_crypto_helper::rsa3072::Rsa3072KeyPair; @@ -56,7 +56,7 @@ pub type TestStateHandler = HandleStateMock; pub type TestSidechainDb = SidechainDB; -pub type TestOCallApi = OcallApiMock; +pub type TestOCallApi = OnchainMock; pub type TestParentchainBlockImportTrigger = TriggerParentchainBlockImportMock; @@ -68,14 +68,18 @@ pub type TestRpcResponder = RpcResponderMock; pub type TestTopPool = BasicPool, ParentchainBlock, TestRpcResponder>; -pub type TestRpcAuthor = +pub type TestTopPoolAuthor = Author; -pub type TestTopPoolExecutor = - TopPoolOperationHandler; +pub type TestTopPoolExecutor = TopPoolOperationHandler< + ParentchainBlock, + SignedSidechainBlock, + TestTopPoolAuthor, + TestStfExecutor, +>; pub type TestBlockComposer = - BlockComposer; + BlockComposer; pub type TestBlockImporter = BlockImporter< TestSigner, diff --git a/enclave-runtime/src/test/sidechain_aura_tests.rs b/enclave-runtime/src/test/sidechain_aura_tests.rs index 6a2c44c5c4..1f64d78aab 100644 --- a/enclave-runtime/src/test/sidechain_aura_tests.rs +++ b/enclave-runtime/src/test/sidechain_aura_tests.rs @@ -39,14 +39,14 @@ use 
itp_test::{ mock::{handle_state_mock::HandleStateMock, metrics_ocall_mock::MetricsOCallMock}, }; use itp_time_utils::duration_now; +use itp_top_pool::pool::Options as PoolOptions; +use itp_top_pool_author::{api::SidechainApi, author::AuthorTopFilter, traits::AuthorApi}; use itp_types::{AccountId, Block as ParentchainBlock, Enclave, ShardIdentifier}; use its_sidechain::{ aura::proposer_factory::ProposerFactory, - primitives::types::SignedBlock as SignedSidechainBlock, + primitives::{traits::Block, types::SignedBlock as SignedSidechainBlock}, slots::{slot_from_time_stamp_and_duration, SlotInfo}, state::SidechainState, - top_pool::pool::Options as PoolOptions, - top_pool_rpc_author::{api::SidechainApi, author::AuthorTopFilter, traits::AuthorApi}, }; use jsonrpc_core::futures::executor; use log::*; @@ -78,7 +78,7 @@ pub fn produce_sidechain_block_and_import_it() { let stf_executor = Arc::new(TestStfExecutor::new(ocall_api.clone(), state_handler.clone())); let top_pool = create_top_pool(); - let rpc_author = Arc::new(TestRpcAuthor::new( + let top_pool_author = Arc::new(TestTopPoolAuthor::new( top_pool, AuthorTopFilter {}, state_handler.clone(), @@ -86,8 +86,11 @@ pub fn produce_sidechain_block_and_import_it() { Arc::new(MetricsOCallMock {}), )); let top_pool_operation_handler = - Arc::new(TestTopPoolExecutor::new(rpc_author.clone(), stf_executor.clone())); + Arc::new(TestTopPoolExecutor::new(top_pool_author.clone(), stf_executor.clone())); let parentchain_block_import_trigger = Arc::new(TestParentchainBlockImportTrigger::default()); + let extrinsics_factory = Arc::new(ExtrinsicsFactoryMock::default()); + let validator_access = Arc::new(ValidatorAccessMock::default()); + let block_importer = Arc::new(TestBlockImporter::new( state_handler.clone(), state_key, @@ -95,15 +98,12 @@ pub fn produce_sidechain_block_and_import_it() { top_pool_operation_handler.clone(), parentchain_block_import_trigger.clone(), ocall_api.clone(), - Arc::new(ExtrinsicsFactoryMock::default()), - Arc::new(ValidatorAccessMock::default()), + extrinsics_factory.clone(), + validator_access.clone(), )); - let block_composer = - Arc::new(TestBlockComposer::new(signer.clone(), state_key, rpc_author.clone())); + let block_composer = Arc::new(TestBlockComposer::new(signer.clone(), state_key)); let proposer_environment = ProposerFactory::new(top_pool_operation_handler, stf_executor.clone(), block_composer); - let extrinsics_factory = ExtrinsicsFactoryMock::default(); - let validator_access = ValidatorAccessMock::default(); info!("Create trusted operations.."); let sender = endowed_account(); @@ -127,12 +127,12 @@ pub fn produce_sidechain_block_and_import_it() { 200000, ); info!("Add trusted operations to TOP pool.."); - executor::block_on(rpc_author.submit_top(trusted_operation, shard_id)).unwrap(); - executor::block_on(rpc_author.submit_top(invalid_trusted_operation, shard_id)).unwrap(); + executor::block_on(top_pool_author.submit_top(trusted_operation, shard_id)).unwrap(); + executor::block_on(top_pool_author.submit_top(invalid_trusted_operation, shard_id)).unwrap(); // Ensure we have exactly two trusted calls in our TOP pool, and no getters. 
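// (`get_pending_tops_separated` returns a tuple of (trusted calls, trusted
// getters), hence the `.0` / `.1` accessors in the assertions below.)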
- assert_eq!(2, rpc_author.get_pending_tops_separated(shard_id).unwrap().0.len()); - assert!(rpc_author.get_pending_tops_separated(shard_id).unwrap().1.is_empty()); + assert_eq!(2, top_pool_author.get_pending_tops_separated(shard_id).unwrap().0.len()); + assert!(top_pool_author.get_pending_tops_separated(shard_id).unwrap().1.is_empty()); info!("Setup AURA SlotInfo"); let parentchain_header = ParentchainHeaderBuilder::default().build(); @@ -171,7 +171,7 @@ pub fn produce_sidechain_block_and_import_it() { assert!(parentchain_block_import_trigger.has_import_been_called()); // Ensure that invalid calls are removed from pool. Valid calls should only be removed upon block import. - assert_eq!(1, rpc_author.get_pending_tops_separated(shard_id).unwrap().0.len()); + assert_eq!(1, top_pool_author.get_pending_tops_separated(shard_id).unwrap().0.len()); info!("Executed AURA successfully. Sending blocks and extrinsics.."); let propose_to_block_import_ocall_api = @@ -181,13 +181,13 @@ pub fn produce_sidechain_block_and_import_it() { blocks, opaque_calls, propose_to_block_import_ocall_api, - &validator_access, - &extrinsics_factory, + validator_access.as_ref(), + extrinsics_factory.as_ref(), ) .unwrap(); // After importing the sidechain block, the trusted operation should be removed. - assert!(rpc_author.get_pending_tops_separated(shard_id).unwrap().0.is_empty()); + assert!(top_pool_author.get_pending_tops_separated(shard_id).unwrap().0.is_empty()); // After importing the block, the state hash must be changed. // We don't have a way to directly compare state hashes, because calculating the state hash @@ -197,7 +197,7 @@ pub fn produce_sidechain_block_and_import_it() { get_state_hash(state_handler.as_ref(), &shard_id) ); - let mut state = state_handler.load_initialized(&shard_id).unwrap(); + let mut state = state_handler.load(&shard_id).unwrap(); let free_balance = Stf::account_data(&mut state, &receiver.public().into()).unwrap().free; assert_eq!(free_balance, transfered_amount); } @@ -232,14 +232,14 @@ fn get_state_hashes_from_block( signed_block: &SignedSidechainBlock, state_key: &Aes, ) -> (H256, H256) { - let mut state_payload = signed_block.block.state_payload.clone(); - state_key.decrypt(&mut state_payload).unwrap(); - let decoded_state = StatePayload::decode(&mut state_payload.as_slice()).unwrap(); + let mut encrypted_state_diff = signed_block.block.block_data().encrypted_state_diff.clone(); + state_key.decrypt(&mut encrypted_state_diff).unwrap(); + let decoded_state = StatePayload::decode(&mut encrypted_state_diff.as_slice()).unwrap(); (decoded_state.state_hash_apriori(), decoded_state.state_hash_aposteriori()) } fn get_state_hash(state_handler: &HandleStateMock, shard_id: &ShardIdentifier) -> H256 { - let state = state_handler.load_initialized(shard_id).unwrap(); + let state = state_handler.load(shard_id).unwrap(); let sidechain_state = TestSidechainDb::new(state); sidechain_state.state_hash() } diff --git a/enclave-runtime/src/tests.rs b/enclave-runtime/src/tests.rs index 8fe3288ce9..343a9220d9 100644 --- a/enclave-runtime/src/tests.rs +++ b/enclave-runtime/src/tests.rs @@ -51,23 +51,26 @@ use itp_test::mock::{ handle_state_mock, handle_state_mock::HandleStateMock, metrics_ocall_mock::MetricsOCallMock, shielding_crypto_mock::ShieldingCryptoMock, }; +use itp_top_pool::{basic_pool::BasicPool, pool::ExtrinsicHash}; +use itp_top_pool_author::{ + api::SidechainApi, + author::Author, + author_tests, + test_utils::{get_pending_tops_separated, submit_operation_to_top_pool}, + 
top_filter::AllowAllTopsFilter, +}; use itp_types::{AccountId, Block, Header, MrEnclave, OpaqueCall}; use its_sidechain::{ block_composer::{BlockComposer, ComposeBlockAndConfirmation}, primitives::{ - traits::{Block as BlockT, SignedBlock as SignedBlockT}, + traits::{ + Block as BlockTrait, BlockData, Header as SidechainHeaderTrait, + SignedBlock as SignedBlockTrait, + }, types::block::SignedBlock, }, state::{SidechainDB, SidechainState, SidechainSystemExt}, - top_pool::{basic_pool::BasicPool, pool::ExtrinsicHash}, top_pool_executor::{TopPoolCallOperator, TopPoolOperationHandler}, - top_pool_rpc_author::{ - api::SidechainApi, - author::Author, - author_tests, - test_utils::{get_pending_tops_separated, submit_operation_to_top_pool}, - top_filter::AllowAllTopsFilter, - }, }; use sgx_externalities::{SgxExternalities, SgxExternalitiesTrait}; use sgx_tunittest::*; @@ -78,18 +81,23 @@ use std::{string::String, sync::Arc, vec::Vec}; type TestRpcResponder = RpcResponderMock>>; type TestTopPool = BasicPool, Block, TestRpcResponder>; -type TestRpcAuthor = +type TestTopPoolAuthor = Author; #[no_mangle] pub extern "C" fn test_main_entrance() -> size_t { rsgx_unit_tests!( attestation::tests::decode_spid_works, - itp_stf_state_handler::tests::test_write_and_load_state_works, - itp_stf_state_handler::tests::test_sgx_state_decode_encode_works, - itp_stf_state_handler::tests::test_encrypt_decrypt_state_type_works, - itp_stf_state_handler::tests::test_write_access_locks_read_until_finished, - itp_stf_state_handler::tests::test_ensure_subsequent_state_loads_have_same_hash, + itp_stf_state_handler::test::sgx_tests::test_write_and_load_state_works, + itp_stf_state_handler::test::sgx_tests::test_sgx_state_decode_encode_works, + itp_stf_state_handler::test::sgx_tests::test_encrypt_decrypt_state_type_works, + itp_stf_state_handler::test::sgx_tests::test_write_access_locks_read_until_finished, + itp_stf_state_handler::test::sgx_tests::test_ensure_subsequent_state_loads_have_same_hash, + itp_stf_state_handler::test::sgx_tests::test_state_handler_file_backend_is_initialized, + itp_stf_state_handler::test::sgx_tests::test_multiple_state_updates_create_snapshots_up_to_cache_size, + itp_stf_state_handler::test::sgx_tests::test_state_files_from_handler_can_be_loaded_again, + itp_stf_state_handler::test::sgx_tests::test_file_io_get_state_hash_works, + itp_stf_state_handler::test::sgx_tests::test_list_state_ids_ignores_files_not_matching_the_pattern, test_compose_block_and_confirmation, test_submit_trusted_call_to_top_pool, test_submit_trusted_getter_to_top_pool, @@ -109,7 +117,7 @@ pub extern "C" fn test_main_entrance() -> size_t { author_tests::submitting_getter_to_author_when_top_is_filtered_inserts_in_pool, handle_state_mock::tests::initialized_shards_list_is_empty, handle_state_mock::tests::shard_exists_after_inserting, - handle_state_mock::tests::load_initialized_inserts_default_state, + handle_state_mock::tests::initialize_creates_default_state, handle_state_mock::tests::load_mutate_and_write_works, handle_state_mock::tests::ensure_subsequent_state_loads_have_same_hash, handle_state_mock::tests::ensure_encode_and_encrypt_does_not_affect_state_hash, @@ -136,9 +144,9 @@ pub extern "C" fn test_main_entrance() -> size_t { tls_ra::seal_handler::test::seal_shielding_key_works, tls_ra::seal_handler::test::seal_shielding_key_fails_for_invalid_key, tls_ra::seal_handler::test::unseal_seal_shielding_key_works, - tls_ra::seal_handler::test::seal_signing_key_works, - 
tls_ra::seal_handler::test::seal_signing_key_fails_for_invalid_key, - tls_ra::seal_handler::test::unseal_seal_signing_key_works, + tls_ra::seal_handler::test::seal_state_key_works, + tls_ra::seal_handler::test::seal_state_key_fails_for_invalid_key, + tls_ra::seal_handler::test::unseal_seal_state_key_works, tls_ra::seal_handler::test::seal_state_works, tls_ra::seal_handler::test::seal_state_fails_for_invalid_state, tls_ra::seal_handler::test::unseal_seal_state_works, @@ -154,12 +162,9 @@ pub extern "C" fn test_main_entrance() -> size_t { fn test_compose_block_and_confirmation() { // given - let (rpc_author, _, shard, _, _, state_handler) = test_setup(); - let block_composer = BlockComposer::::new( - test_account(), - state_key(), - rpc_author.clone(), - ); + let (_, _, shard, _, _, state_handler) = test_setup(); + let block_composer = + BlockComposer::::new(test_account(), state_key()); let signed_top_hashes: Vec = vec![[94; 32].into(), [1; 32].into()].to_vec(); @@ -167,7 +172,7 @@ fn test_compose_block_and_confirmation() { let mut db = SidechainDB::::new(state.clone()); db.set_block_number(&1); let state_hash_before_execution = db.state_hash(); - state_handler.write(db.ext.clone(), lock, &shard).unwrap(); + state_handler.write_after_mutation(db.ext.clone(), lock, &shard).unwrap(); // when let (opaque_call, signed_block) = block_composer @@ -184,17 +189,17 @@ fn test_compose_block_and_confirmation() { let expected_call = OpaqueCall::from_tuple(&( [TEEREX_MODULE, PROPOSED_SIDECHAIN_BLOCK], shard, - blake2_256(&signed_block.block().encode()), + blake2_256(&signed_block.block().header().encode()), )); assert!(signed_block.verify_signature()); - assert_eq!(signed_block.block().block_number(), 1); + assert_eq!(signed_block.block().header().block_number(), 1); assert!(opaque_call.encode().starts_with(&expected_call.encode())); } fn test_submit_trusted_call_to_top_pool() { // given - let (rpc_author, _, shard, mrenclave, shielding_key, _) = test_setup(); + let (top_pool_author, _, shard, mrenclave, shielding_key, _) = test_setup(); let sender = funded_pair(); @@ -204,14 +209,14 @@ fn test_submit_trusted_call_to_top_pool() { // when submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call.clone()), &shielding_key, shard, ) .unwrap(); - let (calls, _) = get_pending_tops_separated(rpc_author.as_ref(), shard); + let (calls, _) = get_pending_tops_separated(top_pool_author.as_ref(), shard); // then assert_eq!(calls[0], signed_call); @@ -219,7 +224,7 @@ fn test_submit_trusted_call_to_top_pool() { fn test_submit_trusted_getter_to_top_pool() { // given - let (rpc_author, _, shard, _, shielding_key, _) = test_setup(); + let (top_pool_author, _, shard, _, shielding_key, _) = test_setup(); let sender = funded_pair(); @@ -227,14 +232,14 @@ fn test_submit_trusted_getter_to_top_pool() { // when submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &signed_getter.clone().into(), &shielding_key, shard, ) .unwrap(); - let (_, getters) = get_pending_tops_separated(rpc_author.as_ref(), shard); + let (_, getters) = get_pending_tops_separated(top_pool_author.as_ref(), shard); // then assert_eq!(getters[0], signed_getter); @@ -242,7 +247,7 @@ fn test_submit_trusted_getter_to_top_pool() { fn test_differentiate_getter_and_call_works() { // given - let (rpc_author, _, shard, mrenclave, shielding_key, _) = test_setup(); + let (top_pool_author, _, shard, mrenclave, shielding_key, _) = test_setup(); // create accounts let sender = funded_pair(); @@ 
-256,21 +261,21 @@ fn test_differentiate_getter_and_call_works() { // when submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &signed_getter.clone().into(), &shielding_key, shard, ) .unwrap(); submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call.clone()), &shielding_key, shard, ) .unwrap(); - let (calls, getters) = get_pending_tops_separated(rpc_author.as_ref(), shard); + let (calls, getters) = get_pending_tops_separated(top_pool_author.as_ref(), shard); // then assert_eq!(calls[0], signed_call); @@ -279,17 +284,14 @@ fn test_differentiate_getter_and_call_works() { fn test_create_block_and_confirmation_works() { // given - let (rpc_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); + let (top_pool_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); let stf_executor = Arc::new(StfExecutor::new(Arc::new(OcallApi), state_handler.clone())); let top_pool_executor = TopPoolOperationHandler::::new( - rpc_author.clone(), + top_pool_author.clone(), stf_executor.clone(), ); - let block_composer = BlockComposer::::new( - test_account(), - state_key(), - rpc_author.clone(), - ); + let block_composer = + BlockComposer::::new(test_account(), state_key()); let sender = funded_pair(); let receiver = unfunded_public(); @@ -298,7 +300,7 @@ fn test_create_block_and_confirmation_works() { .sign(&sender.into(), 0, &mrenclave, &shard); let top_hash = submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call), &shielding_key, shard, @@ -339,28 +341,25 @@ fn test_create_block_and_confirmation_works() { let expected_call = OpaqueCall::from_tuple(&( [TEEREX_MODULE, PROPOSED_SIDECHAIN_BLOCK], shard, - blake2_256(&signed_block.block().encode()), + blake2_256(&signed_block.block().header().encode()), )); assert!(signed_block.verify_signature()); - assert_eq!(signed_block.block().block_number(), 1); - assert_eq!(signed_block.block().signed_top_hashes()[0], top_hash); + assert_eq!(signed_block.block().header().block_number(), 1); + assert_eq!(signed_block.block().block_data().signed_top_hashes()[0], top_hash); assert!(opaque_call.encode().starts_with(&expected_call.encode())); } fn test_create_state_diff() { // given - let (rpc_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); + let (top_pool_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); let stf_executor = Arc::new(StfExecutor::new(Arc::new(OcallApi), state_handler.clone())); let top_pool_executor = TopPoolOperationHandler::::new( - rpc_author.clone(), + top_pool_author.clone(), stf_executor.clone(), ); - let block_composer = BlockComposer::::new( - test_account(), - state_key(), - rpc_author.clone(), - ); + let block_composer = + BlockComposer::::new(test_account(), state_key()); let sender = funded_pair(); let receiver = unfunded_public(); @@ -369,7 +368,7 @@ fn test_create_state_diff() { .sign(&sender.clone().into(), 0, &mrenclave, &shard); submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call), &shielding_key, shard, @@ -406,8 +405,10 @@ fn test_create_state_diff() { ) .unwrap(); - let state_payload = state_payload_from_encrypted(signed_block.block().state_payload()); - let state_diff = state_payload.state_update(); + let encrypted_state_diff = encrypted_state_diff_from_encrypted( + signed_block.block().block_data().encrypted_state_diff(), + ); + let state_diff = 
encrypted_state_diff.state_update(); // then let sender_acc_info: AccountInfo = @@ -425,10 +426,10 @@ fn test_create_state_diff() { fn test_executing_call_updates_account_nonce() { // given - let (rpc_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); + let (top_pool_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); let stf_executor = Arc::new(StfExecutor::new(Arc::new(OcallApi), state_handler.clone())); let top_pool_executor = TopPoolOperationHandler::::new( - rpc_author.clone(), + top_pool_author.clone(), stf_executor.clone(), ); @@ -439,7 +440,7 @@ fn test_executing_call_updates_account_nonce() { .sign(&sender.clone().into(), 0, &mrenclave, &shard); submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call), &shielding_key, shard, @@ -461,14 +462,14 @@ fn test_executing_call_updates_account_nonce() { } // then - let mut state = state_handler.load_initialized(&shard).unwrap(); + let mut state = state_handler.load(&shard).unwrap(); let nonce = Stf::account_nonce(&mut state, &sender.public().into()); assert_eq!(nonce, 1); } fn test_call_set_update_parentchain_block() { let (_, _, shard, _, _, state_handler) = test_setup(); - let mut state = state_handler.load_initialized(&shard).unwrap(); + let mut state = state_handler.load(&shard).unwrap(); let block_number = 3; let parent_hash = H256::from([1; 32]); @@ -490,10 +491,10 @@ fn test_call_set_update_parentchain_block() { fn test_invalid_nonce_call_is_not_executed() { // given - let (rpc_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); + let (top_pool_author, _, shard, mrenclave, shielding_key, state_handler) = test_setup(); let stf_executor = Arc::new(StfExecutor::new(Arc::new(OcallApi), state_handler.clone())); let top_pool_executor = TopPoolOperationHandler::::new( - rpc_author.clone(), + top_pool_author.clone(), stf_executor.clone(), ); @@ -505,7 +506,7 @@ fn test_invalid_nonce_call_is_not_executed() { .sign(&sender.clone().into(), 10, &mrenclave, &shard); submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call), &shielding_key, shard, @@ -530,10 +531,10 @@ fn test_invalid_nonce_call_is_not_executed() { fn test_non_root_shielding_call_is_not_executed() { // given - let (rpc_author, _state, shard, mrenclave, shielding_key, state_handler) = test_setup(); + let (top_pool_author, _state, shard, mrenclave, shielding_key, state_handler) = test_setup(); let stf_executor = Arc::new(StfExecutor::new(Arc::new(OcallApi), state_handler.clone())); let top_pool_executor = TopPoolOperationHandler::::new( - rpc_author.clone(), + top_pool_author.clone(), stf_executor.clone(), ); @@ -544,7 +545,7 @@ fn test_non_root_shielding_call_is_not_executed() { .sign(&sender.into(), 0, &mrenclave, &shard); submit_operation_to_top_pool( - rpc_author.as_ref(), + top_pool_author.as_ref(), &direct_top(signed_call), &shielding_key, shard, @@ -576,7 +577,7 @@ pub fn test_top_pool() -> TestTopPool { } /// Decrypt `encrypted` and decode it into `StatePayload` -pub fn state_payload_from_encrypted(encrypted: &[u8]) -> StatePayload { +pub fn encrypted_state_diff_from_encrypted(encrypted: &[u8]) -> StatePayload { let mut encrypted_payload: Vec = encrypted.to_vec(); let state_key = state_key(); state_key.decrypt(&mut encrypted_payload).unwrap(); @@ -590,7 +591,7 @@ pub fn state_key() -> Aes { /// Returns all the things that are commonly used in tests and runs /// `ensure_no_empty_shard_directory_exists` pub fn 
test_setup() -> ( - Arc, + Arc, State, ShardIdentifier, MrEnclave, @@ -604,7 +605,7 @@ pub fn test_setup() -> ( let encryption_key = ShieldingCryptoMock::default(); ( - Arc::new(TestRpcAuthor::new( + Arc::new(TestTopPoolAuthor::new( Arc::new(top_pool), AllowAllTopsFilter, state_handler.clone(), diff --git a/enclave-runtime/src/tls_ra/mocks.rs b/enclave-runtime/src/tls_ra/mocks.rs index c86b76a165..18e82827c0 100644 --- a/enclave-runtime/src/tls_ra/mocks.rs +++ b/enclave-runtime/src/tls_ra/mocks.rs @@ -26,17 +26,17 @@ use std::{ #[derive(Clone)] pub struct SealHandlerMock { pub shielding_key: Arc>>, - pub signing_key: Arc>>, + pub state_key: Arc>>, pub state: Arc>>, } impl SealHandlerMock { pub fn new( shielding_key: Arc>>, - signing_key: Arc>>, + state_key: Arc>>, state: Arc>>, ) -> Self { - Self { shielding_key, signing_key, state } + Self { shielding_key, state_key, state } } } @@ -46,8 +46,8 @@ impl SealStateAndKeys for SealHandlerMock { Ok(()) } - fn seal_signing_key(&self, bytes: &[u8]) -> EnclaveResult<()> { - *self.signing_key.write().unwrap() = bytes.to_vec(); + fn seal_state_key(&self, bytes: &[u8]) -> EnclaveResult<()> { + *self.state_key.write().unwrap() = bytes.to_vec(); Ok(()) } @@ -62,8 +62,8 @@ impl UnsealStateAndKeys for SealHandlerMock { Ok(self.shielding_key.read().unwrap().clone()) } - fn unseal_signing_key(&self) -> EnclaveResult> { - Ok(self.signing_key.read().unwrap().clone()) + fn unseal_state_key(&self) -> EnclaveResult> { + Ok(self.state_key.read().unwrap().clone()) } fn unseal_state(&self, _shard: &ShardIdentifier) -> EnclaveResult> { diff --git a/enclave-runtime/src/tls_ra/mod.rs b/enclave-runtime/src/tls_ra/mod.rs index 3496237564..153f590686 100644 --- a/enclave-runtime/src/tls_ra/mod.rs +++ b/enclave-runtime/src/tls_ra/mod.rs @@ -47,7 +47,7 @@ impl TcpHeader { #[derive(Copy, Clone, Debug)] pub enum Opcode { ShieldingKey = 0, - SigningKey = 1, + StateKey = 1, State = 2, } @@ -55,7 +55,7 @@ impl From for Opcode { fn from(item: u8) -> Self { match item { 0 => Opcode::ShieldingKey, - 1 => Opcode::SigningKey, + 1 => Opcode::StateKey, 2 => Opcode::State, _ => unimplemented!(), } diff --git a/enclave-runtime/src/tls_ra/seal_handler.rs b/enclave-runtime/src/tls_ra/seal_handler.rs index a864ec3fdc..4303b8efec 100644 --- a/enclave-runtime/src/tls_ra/seal_handler.rs +++ b/enclave-runtime/src/tls_ra/seal_handler.rs @@ -16,64 +16,70 @@ */ //! Abstraction of the reading (unseal) and storing (seal) part of the -//! shielding key, signing key and state. +//! shielding key, state key and state. use crate::error::{Error as EnclaveError, Result as EnclaveResult}; use codec::{Decode, Encode}; use ita_stf::{State as StfState, StateType as StfStateType}; -use itp_sgx_crypto::{Aes, AesSeal, Error as CryptoError}; -use itp_sgx_io::SealedIO; -use itp_stf_state_handler::handle_state::HandleState; +use itp_sgx_crypto::{Aes, Error as CryptoError}; +use itp_sgx_io::StaticSealedIO; +use itp_stf_state_handler::{ + handle_state::HandleState, + state_key_repository::{AccessStateKey, MutateStateKey}, +}; use itp_types::ShardIdentifier; use log::*; use sgx_crypto_helper::rsa3072::Rsa3072KeyPair; use std::{marker::PhantomData, sync::Arc, vec::Vec}; -pub trait SealedIOForShieldingKey = SealedIO; -pub trait SealedIOForSigningKey = SealedIO; +pub trait SealedIOForShieldingKey = StaticSealedIO; -/// Handles the sealing and unsealing of the shielding key, signing key and the state. +/// Handles the sealing and unsealing of the shielding key, state key and the state. 
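// During state provisioning, the side that already holds the secrets unseals
// the shielding key, the state key and the state and streams them to the
// requesting worker, which seals them again locally. Condensed sketch of
// that exchange (the real traits below return `EnclaveResult` and pass the
// shard for the state):
trait UnsealSketch {
    fn unseal_shielding_key(&self) -> Vec<u8>;
    fn unseal_state_key(&self) -> Vec<u8>;
    fn unseal_state(&self) -> Vec<u8>;
}

trait SealSketch {
    fn seal_shielding_key(&self, bytes: &[u8]);
    fn seal_state_key(&self, bytes: &[u8]);
    fn seal_state(&self, bytes: &[u8]);
}

fn provision_sketch(provider: &impl UnsealSketch, receiver: &impl SealSketch) {
    receiver.seal_shielding_key(&provider.unseal_shielding_key());
    receiver.seal_state_key(&provider.unseal_state_key());
    receiver.seal_state(&provider.unseal_state());
}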
#[derive(Default)] -pub struct SealHandler +pub struct SealHandler where ShieldingKeyHandler: SealedIOForShieldingKey, - SigningKeyHandler: SealedIOForSigningKey, + StateKeyRepository: AccessStateKey + MutateStateKey, // Constraint StateT = StfState currently necessary because SgxExternalities Encode/Decode does not work. // See https://github.com/integritee-network/sgx-runtime/issues/46. StateHandler: HandleState, { state_handler: Arc, - _phantom_key_handler: PhantomData<(ShieldingKeyHandler, SigningKeyHandler)>, + state_key_repository: Arc, + _phantom_key_handler: PhantomData, } -impl - SealHandler +impl + SealHandler where ShieldingKeyHandler: SealedIOForShieldingKey, - SigningKeyHandler: SealedIOForSigningKey, + StateKeyRepository: AccessStateKey + MutateStateKey, StateHandler: HandleState, { - pub fn new(state_handler: Arc) -> Self { - Self { state_handler, _phantom_key_handler: Default::default() } + pub fn new( + state_handler: Arc, + state_key_repository: Arc, + ) -> Self { + Self { state_handler, state_key_repository, _phantom_key_handler: Default::default() } } } pub trait SealStateAndKeys { fn seal_shielding_key(&self, bytes: &[u8]) -> EnclaveResult<()>; - fn seal_signing_key(&self, bytes: &[u8]) -> EnclaveResult<()>; + fn seal_state_key(&self, bytes: &[u8]) -> EnclaveResult<()>; fn seal_state(&self, bytes: &[u8], shard: &ShardIdentifier) -> EnclaveResult<()>; } pub trait UnsealStateAndKeys { fn unseal_shielding_key(&self) -> EnclaveResult>; - fn unseal_signing_key(&self) -> EnclaveResult>; - fn unseal_state(&self, state: &ShardIdentifier) -> EnclaveResult>; + fn unseal_state_key(&self) -> EnclaveResult>; + fn unseal_state(&self, shard: &ShardIdentifier) -> EnclaveResult>; } -impl SealStateAndKeys - for SealHandler +impl SealStateAndKeys + for SealHandler where ShieldingKeyHandler: SealedIOForShieldingKey, - SigningKeyHandler: SealedIOForSigningKey, + StateKeyRepository: AccessStateKey + MutateStateKey, StateHandler: HandleState, { fn seal_shielding_key(&self, bytes: &[u8]) -> EnclaveResult<()> { @@ -81,47 +87,49 @@ where error!(" [Enclave] Received Invalid RSA key"); EnclaveError::Other(e.into()) })?; - ShieldingKeyHandler::seal(key)?; + ShieldingKeyHandler::seal_to_static_file(key)?; info!("Successfully stored a new shielding key"); Ok(()) } - fn seal_signing_key(&self, mut bytes: &[u8]) -> EnclaveResult<()> { + fn seal_state_key(&self, mut bytes: &[u8]) -> EnclaveResult<()> { let aes = Aes::decode(&mut bytes)?; - AesSeal::seal(Aes::new(aes.key, aes.init_vec))?; - info!("Successfully stored a new signing key"); + self.state_key_repository.update_key(aes)?; + info!("Successfully stored a new state key"); Ok(()) } fn seal_state(&self, mut bytes: &[u8], shard: &ShardIdentifier) -> EnclaveResult<()> { let state = StfStateType::decode(&mut bytes)?; let state_with_empty_diff = StfState { state, state_diff: Default::default() }; - let (state_lock, _) = self.state_handler.load_for_mutation(shard)?; - self.state_handler.write(state_with_empty_diff, state_lock, shard)?; + self.state_handler.reset(state_with_empty_diff, shard)?; info!("Successfully updated shard {:?} with provisioned state", shard); Ok(()) } } -impl UnsealStateAndKeys - for SealHandler +impl UnsealStateAndKeys + for SealHandler where ShieldingKeyHandler: SealedIOForShieldingKey, - SigningKeyHandler: SealedIOForSigningKey, + StateKeyRepository: AccessStateKey + MutateStateKey, StateHandler: HandleState, { fn unseal_shielding_key(&self) -> EnclaveResult> { - let shielding_key = ShieldingKeyHandler::unseal()?; + let 
shielding_key = ShieldingKeyHandler::unseal_from_static_file()?; serde_json::to_vec(&shielding_key).map_err(|e| EnclaveError::Other(e.into())) } - fn unseal_signing_key(&self) -> EnclaveResult> { - Ok(AesSeal::unseal()?.encode()) + fn unseal_state_key(&self) -> EnclaveResult> { + self.state_key_repository + .retrieve_key() + .map(|k| k.encode()) + .map_err(|e| EnclaveError::Other(e.into())) } fn unseal_state(&self, shard: &ShardIdentifier) -> EnclaveResult> { - let state = self.state_handler.load_initialized(shard)?; + let state = self.state_handler.load(shard)?; Ok(state.state.encode()) } } @@ -129,11 +137,13 @@ where #[cfg(feature = "test")] pub mod test { use super::*; - use itp_sgx_crypto::mocks::{AesSealMock, Rsa3072SealMock}; + use itp_sgx_crypto::mocks::sgx::Rsa3072SealMock; + use itp_stf_state_handler::test::mocks::state_key_repository_mock::StateKeyRepositoryMock; use itp_test::mock::handle_state_mock::HandleStateMock; use sgx_externalities::SgxExternalitiesTrait; - type SealHandlerMock = SealHandler; + type SealHandlerMock = + SealHandler, HandleStateMock>; pub fn seal_shielding_key_works() { let seal_handler = SealHandlerMock::default(); @@ -162,28 +172,28 @@ pub mod test { assert!(result.is_ok()); } - pub fn seal_signing_key_works() { + pub fn seal_state_key_works() { let seal_handler = SealHandlerMock::default(); let key_pair_in_bytes = Aes::default().encode(); - let result = seal_handler.seal_signing_key(&key_pair_in_bytes); + let result = seal_handler.seal_state_key(&key_pair_in_bytes); assert!(result.is_ok()); } - pub fn seal_signing_key_fails_for_invalid_key() { + pub fn seal_state_key_fails_for_invalid_key() { let seal_handler = SealHandlerMock::default(); - let result = seal_handler.seal_signing_key(&[1, 2, 3]); + let result = seal_handler.seal_state_key(&[1, 2, 3]); assert!(result.is_err()); } - pub fn unseal_seal_signing_key_works() { + pub fn unseal_seal_state_key_works() { let seal_handler = SealHandlerMock::default(); - let key_pair_in_bytes = seal_handler.unseal_signing_key().unwrap(); + let key_pair_in_bytes = seal_handler.unseal_state_key().unwrap(); - let result = seal_handler.seal_signing_key(&key_pair_in_bytes); + let result = seal_handler.seal_state_key(&key_pair_in_bytes); assert!(result.is_ok()); } @@ -192,6 +202,7 @@ pub mod test { let seal_handler = SealHandlerMock::default(); let state = ::StateT::default(); let shard = ShardIdentifier::default(); + let _init_hash = seal_handler.state_handler.initialize_shard(shard).unwrap(); let result = seal_handler.seal_state(&state.encode(), &shard); @@ -210,11 +221,12 @@ pub mod test { pub fn unseal_seal_state_works() { let seal_handler = SealHandlerMock::default(); let shard = ShardIdentifier::default(); + seal_handler.state_handler.initialize_shard(shard).unwrap(); // Fill our mock state: let (lock, mut state) = seal_handler.state_handler.load_for_mutation(&shard).unwrap(); let (key, value) = ("my_key", "my_value"); state.insert(key.encode(), value.encode()); - seal_handler.state_handler.write(state, lock, &shard).unwrap(); + seal_handler.state_handler.write_after_mutation(state, lock, &shard).unwrap(); let state_in_bytes = seal_handler.unseal_state(&shard).unwrap(); diff --git a/enclave-runtime/src/tls_ra/tests.rs b/enclave-runtime/src/tls_ra/tests.rs index 286ae204a9..7c60cd444b 100644 --- a/enclave-runtime/src/tls_ra/tests.rs +++ b/enclave-runtime/src/tls_ra/tests.rs @@ -52,21 +52,21 @@ fn run_state_provisioning_server(seal_handler: SealHandlerMock) { pub fn test_tls_ra_server_client_networking() { let shard = 
ShardIdentifier::default(); let shielding_key = vec![1, 2, 3]; - let signing_key = vec![5, 2, 3]; + let state_key = vec![5, 2, 3]; let state = vec![5, 2, 3, 10, 21, 0, 9, 1]; let server_seal_handler = SealHandlerMock::new( Arc::new(RwLock::new(shielding_key.clone())), - Arc::new(RwLock::new(signing_key.clone())), + Arc::new(RwLock::new(state_key.clone())), Arc::new(RwLock::new(state.clone())), ); let client_shielding_key = Arc::new(RwLock::new(Vec::new())); - let client_signing_key = Arc::new(RwLock::new(Vec::new())); + let client_state_key = Arc::new(RwLock::new(Vec::new())); let client_state = Arc::new(RwLock::new(Vec::new())); let client_seal_handler = SealHandlerMock::new( client_shielding_key.clone(), - client_signing_key.clone(), + client_state_key.clone(), client_state.clone(), ); @@ -91,6 +91,6 @@ pub fn test_tls_ra_server_client_networking() { assert!(result.is_ok()); assert_eq!(*client_shielding_key.read().unwrap(), shielding_key); - assert_eq!(*client_signing_key.read().unwrap(), signing_key); + assert_eq!(*client_state_key.read().unwrap(), state_key); assert_eq!(*client_state.read().unwrap(), state); } diff --git a/enclave-runtime/src/tls_ra/tls_ra_client.rs b/enclave-runtime/src/tls_ra/tls_ra_client.rs index da9be88a87..3b6d7537db 100644 --- a/enclave-runtime/src/tls_ra/tls_ra_client.rs +++ b/enclave-runtime/src/tls_ra/tls_ra_client.rs @@ -21,12 +21,14 @@ use super::{authentication::ServerAuth, Opcode, TcpHeader}; use crate::{ attestation::{create_ra_report_and_signature, DEV_HOSTNAME}, error::{Error as EnclaveError, Result as EnclaveResult}, + global_components::GLOBAL_STATE_KEY_REPOSITORY_COMPONENT, ocall::OcallApi, tls_ra::seal_handler::{SealHandler, SealStateAndKeys}, + GLOBAL_STATE_HANDLER_COMPONENT, }; +use itp_component_container::ComponentGetter; use itp_ocall_api::EnclaveAttestationOCallApi; -use itp_sgx_crypto::{AesSeal, Rsa3072Seal}; -use itp_stf_state_handler::GlobalFileStateHandler; +use itp_sgx_crypto::Rsa3072Seal; use itp_types::ShardIdentifier; use log::*; use rustls::{ClientConfig, ClientSession, Stream}; @@ -101,7 +103,7 @@ where let bytes = self.read_until(header.payload_length as usize)?; match header.opcode { Opcode::ShieldingKey => self.seal_handler.seal_shielding_key(&bytes)?, - Opcode::SigningKey => self.seal_handler.seal_signing_key(&bytes)?, + Opcode::StateKey => self.seal_handler.seal_state_key(&bytes)?, Opcode::State => self.seal_handler.seal_state(&bytes, &self.shard)?, }; Ok(true) @@ -139,8 +141,23 @@ pub unsafe extern "C" fn request_state_provisioning( let _ = backtrace::enable_backtrace("enclave.signed.so", PrintFormat::Short); let shard = ShardIdentifier::from_slice(slice::from_raw_parts(shard, shard_size as usize)); - let state_handler = Arc::new(GlobalFileStateHandler); - let seal_handler = SealHandler::::new(state_handler); + let state_handler = match GLOBAL_STATE_HANDLER_COMPONENT.get() { + Ok(s) => s, + Err(e) => { + error!("{:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + }, + }; + + let state_key_repository = match GLOBAL_STATE_KEY_REPOSITORY_COMPONENT.get() { + Ok(s) => s, + Err(e) => { + error!("{:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + }, + }; + + let seal_handler = SealHandler::::new(state_handler, state_key_repository); if let Err(e) = request_state_provisioning_internal(socket_fd, sign_type, shard, skip_ra, seal_handler) diff --git a/enclave-runtime/src/tls_ra/tls_ra_server.rs b/enclave-runtime/src/tls_ra/tls_ra_server.rs index dbaef30be8..b7800c10c1 100644 --- a/enclave-runtime/src/tls_ra/tls_ra_server.rs 
+++ b/enclave-runtime/src/tls_ra/tls_ra_server.rs @@ -21,12 +21,14 @@ use super::{authentication::ClientAuth, Opcode, TcpHeader}; use crate::{ attestation::create_ra_report_and_signature, error::{Error as EnclaveError, Result as EnclaveResult}, + global_components::GLOBAL_STATE_KEY_REPOSITORY_COMPONENT, ocall::OcallApi, tls_ra::seal_handler::{SealHandler, UnsealStateAndKeys}, + GLOBAL_STATE_HANDLER_COMPONENT, }; +use itp_component_container::ComponentGetter; use itp_ocall_api::EnclaveAttestationOCallApi; -use itp_sgx_crypto::{AesSeal, Rsa3072Seal}; -use itp_stf_state_handler::GlobalFileStateHandler; +use itp_sgx_crypto::Rsa3072Seal; use itp_types::ShardIdentifier; use log::*; use rustls::{ServerConfig, ServerSession, Stream}; @@ -37,6 +39,7 @@ use std::{ net::TcpStream, sync::Arc, }; + /// Server part of the TCP-level connection and the underlying TLS-level session. /// /// Includes a seal handler, which handles the reading part of the data to be sent. @@ -73,10 +76,10 @@ where /// Sends all relevant data to the client. fn write_all(&mut self, shard: &ShardIdentifier) -> EnclaveResult<()> { let shielding_key = self.seal_handler.unseal_shielding_key()?; - let signing_key = self.seal_handler.unseal_signing_key()?; + let state_key = self.seal_handler.unseal_state_key()?; let state = self.seal_handler.unseal_state(shard)?; self.write(Opcode::ShieldingKey, &shielding_key)?; - self.write(Opcode::SigningKey, &signing_key)?; + self.write(Opcode::StateKey, &state_key)?; self.write(Opcode::State, &state)?; Ok(()) } @@ -104,8 +107,23 @@ pub unsafe extern "C" fn run_state_provisioning_server( ) -> sgx_status_t { let _ = backtrace::enable_backtrace("enclave.signed.so", PrintFormat::Short); - let state_handler = Arc::new(GlobalFileStateHandler); - let seal_handler = SealHandler::::new(state_handler); + let state_handler = match GLOBAL_STATE_HANDLER_COMPONENT.get() { + Ok(s) => s, + Err(e) => { + error!("{:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + }, + }; + + let state_key_repository = match GLOBAL_STATE_KEY_REPOSITORY_COMPONENT.get() { + Ok(s) => s, + Err(e) => { + error!("{:?}", e); + return sgx_status_t::SGX_ERROR_UNEXPECTED + }, + }; + + let seal_handler = SealHandler::::new(state_handler, state_key_repository); if let Err(e) = run_state_provisioning_server_internal(socket_fd, sign_type, skip_ra, seal_handler) diff --git a/enclave-runtime/src/top_pool_execution.rs b/enclave-runtime/src/top_pool_execution.rs index 5780098619..f0827a11ff 100644 --- a/enclave-runtime/src/top_pool_execution.rs +++ b/enclave-runtime/src/top_pool_execution.rs @@ -16,14 +16,15 @@ */ use crate::{ - error::{Error, Result}, + error::Result, global_components::{ - GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT, GLOBAL_RPC_AUTHOR_COMPONENT, - GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT, + GLOBAL_EXTRINSICS_FACTORY_COMPONENT, GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT, + GLOBAL_SIDECHAIN_BLOCK_COMPOSER_COMPONENT, GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT, + GLOBAL_STATE_HANDLER_COMPONENT, GLOBAL_STF_EXECUTOR_COMPONENT, + GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT, }, ocall::OcallApi, sync::{EnclaveLock, EnclaveStateRWLock}, - GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT, }; use codec::Encode; use itc_parentchain::{ @@ -36,23 +37,21 @@ use itc_parentchain::{ }, }; use itp_component_container::ComponentGetter; -use itp_extrinsics_factory::{CreateExtrinsics, ExtrinsicsFactory}; -use itp_nonce_cache::GLOBAL_NONCE_CACHE; +use itp_extrinsics_factory::CreateExtrinsics; use itp_ocall_api::{EnclaveOnChainOCallApi, 
EnclaveSidechainOCallApi}; use itp_settings::sidechain::SLOT_DURATION; -use itp_sgx_crypto::{AesSeal, Ed25519Seal}; -use itp_sgx_io::SealedIO; -use itp_stf_executor::executor::StfExecutor; -use itp_stf_state_handler::{query_shard_state::QueryShardState, GlobalFileStateHandler}; -use itp_storage_verifier::GetStorageVerified; +use itp_sgx_crypto::Ed25519Seal; +use itp_sgx_io::StaticSealedIO; +use itp_stf_state_handler::query_shard_state::QueryShardState; use itp_time_utils::{duration_now, remaining_time}; use itp_types::{Block, OpaqueCall, H256}; use its_sidechain::{ aura::{proposer_factory::ProposerFactory, Aura, SlotClaimStrategy}, - block_composer::BlockComposer, consensus_common::{Environment, Error as ConsensusError, ProcessBlockImportQueue}, primitives::{ - traits::{Block as SidechainBlockT, ShardIdentifierFor, SignedBlock}, + traits::{ + Block as SidechainBlockTrait, Header as HeaderTrait, ShardIdentifierFor, SignedBlock, + }, types::block::SignedBlock as SignedSidechainBlock, }, slots::{sgx::LastSlotSeal, yield_next_slot, PerShardSlotWorkerScheduler, SlotInfo}, @@ -82,14 +81,10 @@ pub unsafe extern "C" fn execute_trusted_getters() -> sgx_status_t { fn execute_top_pool_trusted_getters_on_all_shards() -> Result<()> { use itp_settings::enclave::MAX_TRUSTED_GETTERS_EXEC_DURATION; - let top_pool_executor = - GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT.get().ok_or_else(|| { - error!("Failed to retrieve top pool operation handler component. It might not be initialized?"); - Error::ComponentNotInitialized - })?; - - let state_handler = Arc::new(GlobalFileStateHandler); + let top_pool_executor = GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT.get()?; + let state_handler = GLOBAL_STATE_HANDLER_COMPONENT.get()?; let shards = state_handler.list_shards()?; + let mut remaining_shards = shards.len() as u32; let ends_at = duration_now() + MAX_TRUSTED_GETTERS_EXEC_DURATION; @@ -143,50 +138,36 @@ fn execute_top_pool_trusted_calls_internal() -> Result<()> { let slot_beginning_timestamp = duration_now(); - let parentchain_import_dispatcher = GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT - .get() - .ok_or(Error::ComponentNotInitialized)?; + let parentchain_import_dispatcher = GLOBAL_PARENTCHAIN_IMPORT_DISPATCHER_COMPONENT.get()?; let validator_access = ValidatorAccessor::::default(); // This gets the latest imported block. We accept that all of AURA, up until the block production // itself, will operate on a parentchain block that is potentially outdated by one block // (in case we have a block in the queue, but not imported yet). - let (current_parentchain_header, genesis_hash) = - validator_access.execute_on_validator(|v| { - let latest_parentchain_header = v.latest_finalized_header(v.num_relays())?; - let genesis_hash = v.genesis_hash(v.num_relays())?; - Ok((latest_parentchain_header, genesis_hash)) - })?; + let current_parentchain_header = validator_access.execute_on_validator(|v| { + let latest_parentchain_header = v.latest_finalized_header(v.num_relays())?; + Ok(latest_parentchain_header) + })?; // Import any sidechain blocks that are in the import queue. In case we are missing blocks, // a peer sync will happen. If that happens, the slot time might already be used up just by this import. 
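The surrounding hunks replace the manual `ok_or(Error::ComponentNotInitialized)` handling with a plain `?` on the global `GLOBAL_*_COMPONENT.get()` calls. As a rough illustration of why that works, here is a minimal, self-contained sketch of a component container whose `get()` returns a `Result` depending on whether the component was initialized; the names `ComponentContainer`, `ComponentError` and the signatures are simplified stand-ins, not the actual `itp-component-container` API:

use std::sync::{Arc, RwLock};

#[derive(Debug)]
pub enum ComponentError {
    NotInitialized(&'static str),
}

/// Holds a lazily initialized, shared component behind a lock.
pub struct ComponentContainer<T> {
    name: &'static str,
    component: RwLock<Option<Arc<T>>>,
}

impl<T> ComponentContainer<T> {
    pub fn new(name: &'static str) -> Self {
        Self { name, component: RwLock::new(None) }
    }

    /// Store the component; called once during enclave initialization.
    pub fn initialize(&self, component: Arc<T>) {
        *self.component.write().unwrap() = Some(component);
    }

    /// Retrieve the component, or an error if it was never initialized.
    /// Returning a `Result` is what lets call sites use `?` directly.
    pub fn get(&self) -> Result<Arc<T>, ComponentError> {
        self.component
            .read()
            .unwrap()
            .clone()
            .ok_or(ComponentError::NotInitialized(self.name))
    }
}

fn main() {
    let state_handler: ComponentContainer<String> = ComponentContainer::new("state handler");
    assert!(state_handler.get().is_err());

    state_handler.initialize(Arc::new("ready".to_string()));
    assert_eq!(*state_handler.get().unwrap(), "ready");
}

Because `get()` yields a `Result`, every enclave call site can propagate a missing component with `?` instead of mapping it to a dedicated error variant by hand.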
- let sidechain_block_import_queue_worker = GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT - .get() - .ok_or(Error::ComponentNotInitialized)?; + let sidechain_block_import_queue_worker = + GLOBAL_SIDECHAIN_IMPORT_QUEUE_WORKER_COMPONENT.get()?; + let latest_parentchain_header = sidechain_block_import_queue_worker.process_queue(¤t_parentchain_header)?; - let authority = Ed25519Seal::unseal()?; - let state_key = AesSeal::unseal()?; + let stf_executor = GLOBAL_STF_EXECUTOR_COMPONENT.get()?; - let state_handler = Arc::new(GlobalFileStateHandler); - let stf_executor = Arc::new(StfExecutor::new(Arc::new(OcallApi), state_handler.clone())); - let extrinsics_factory = - ExtrinsicsFactory::new(genesis_hash, authority.clone(), GLOBAL_NONCE_CACHE.clone()); + let top_pool_executor = GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT.get()?; - let top_pool_executor = - GLOBAL_TOP_POOL_OPERATION_HANDLER_COMPONENT.get().ok_or_else(|| { - error!("Failed to retrieve top pool operation handler component. Maybe it's not initialized?"); - Error::ComponentNotInitialized - })?; + let block_composer = GLOBAL_SIDECHAIN_BLOCK_COMPOSER_COMPONENT.get()?; - let rpc_author = GLOBAL_RPC_AUTHOR_COMPONENT.get().ok_or_else(|| { - error!("Failed to retrieve rpc author component. Maybe it's not initialized?"); - Error::ComponentNotInitialized - })?; + let extrinsics_factory = GLOBAL_EXTRINSICS_FACTORY_COMPONENT.get()?; + let state_handler = GLOBAL_STATE_HANDLER_COMPONENT.get()?; - let block_composer = Arc::new(BlockComposer::new(authority.clone(), state_key, rpc_author)); + let authority = Ed25519Seal::unseal_from_static_file()?; match yield_next_slot( slot_beginning_timestamp, @@ -211,6 +192,8 @@ fn execute_top_pool_trusted_calls_internal() -> Result<()> { shards, )?; + debug!("Aura executed successfully"); + // Drop lock as soon as we don't need it anymore. drop(_enclave_write_lock); @@ -219,7 +202,7 @@ fn execute_top_pool_trusted_calls_internal() -> Result<()> { opaque_calls, OcallApi, &validator_access, - &extrinsics_factory, + extrinsics_factory.as_ref(), )? }, None => { @@ -228,6 +211,7 @@ fn execute_top_pool_trusted_calls_internal() -> Result<()> { }, }; + debug!("End sidechain block production cycle"); Ok(()) } @@ -251,12 +235,13 @@ where ParentchainBlock: BlockTrait, SignedSidechainBlock: SignedBlock + 'static, // Setting the public type is necessary due to some non-generic downstream code. - SignedSidechainBlock::Block: - SidechainBlockT, + SignedSidechainBlock::Block: SidechainBlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, SignedSidechainBlock::Signature: From, Authority: Pair, Authority::Public: Encode, - OCallApi: ValidateerFetch + GetStorageVerified + Send + 'static, + OCallApi: ValidateerFetch + EnclaveOnChainOCallApi + Send + 'static, NumberFor: BlockNumberOps, PEnvironment: Environment + Send + Sync, @@ -306,10 +291,12 @@ where NumberFor: BlockNumberOps, ExtrinsicsFactory: CreateExtrinsics, { + debug!("Proposing {} sidechain block(s) (broadcasting to peers)", blocks.len()); ocall_api.propose_sidechain_blocks(blocks)?; let xts = extrinsics_factory.create_extrinsics(opaque_calls.as_slice())?; + debug!("Sending sidechain block(s) confirmation extrinsic.. 
"); validator_access.execute_mut_on_validator(|v| v.send_extrinsics(&ocall_api, xts))?; Ok(()) } diff --git a/local-setup/github-action-config.json b/local-setup/github-action-config.json index d355512310..f693bf6710 100644 --- a/local-setup/github-action-config.json +++ b/local-setup/github-action-config.json @@ -16,7 +16,9 @@ "-w", "2001", "-r", - "3443" + "3443", + "-h", + "4545" ], "subcommand_flags": [ "--skip-ra", @@ -31,7 +33,9 @@ "-w", "3001", "-r", - "3444" + "3444", + "-h", + "4546" ], "subcommand_flags": [ "--skip-ra", diff --git a/local-setup/simple-config.json b/local-setup/simple-config.json index 2a973e836a..56b51ef2e8 100644 --- a/local-setup/simple-config.json +++ b/local-setup/simple-config.json @@ -24,7 +24,9 @@ "-r", "3490", "-w", - "2091" + "2091", + "-h", + "4545" ], "subcommand_flags": [ "--skip-ra", @@ -41,7 +43,9 @@ "-r", "3590", "-w", - "3091" + "3091", + "-h", + "4546" ], "subcommand_flags": [ "--skip-ra", diff --git a/local-setup/tutorial-config.json b/local-setup/tutorial-config.json new file mode 100644 index 0000000000..ec05446c1a --- /dev/null +++ b/local-setup/tutorial-config.json @@ -0,0 +1,52 @@ +{ + "node": { + "bin": "../integritee-node/target/release/integritee-node", + "flags": [ + "--tmp", + "--dev", + "-lruntime=info", + "--ws-port", + "9944", + "--port", + "30390", + "--rpc-port", + "8990" + ] + }, + "workers": [ + { + "source": "bin", + "flags": [ + "-P", + "2000", + "-p", + "9944", + "-w", + "2001", + "-r", + "3443" + ], + "subcommand_flags": [ + "--skip-ra", + "--dev" + ] + }, + { + "source": "bin", + "flags": [ + "-P", + "3000", + "-p", + "9944", + "-w", + "3001", + "-r", + "3444" + ], + "subcommand_flags": [ + "--skip-ra", + "--dev" + ] + } + ] +} diff --git a/rust-sgx-sdk/edl/intel/sgx_ttls.edl b/rust-sgx-sdk/edl/intel/sgx_ttls.edl new file mode 100644 index 0000000000..ca0906f578 --- /dev/null +++ b/rust-sgx-sdk/edl/intel/sgx_ttls.edl @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2011-2021 Intel Corporation. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Intel Corporation nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ * + */ + +enclave{ + include "sgx_report.h" + include "sgx_qve_header.h" + include "sgx_ql_lib_common.h" + include "sgx_ql_quote.h" + + untrusted { + quote3_error_t sgx_tls_get_qe_target_info_ocall([size = target_info_size, out] sgx_target_info_t *p_target_info, + size_t target_info_size); + + quote3_error_t sgx_tls_get_quote_size_ocall([out] uint32_t *p_quote_size); + + quote3_error_t sgx_tls_get_quote_ocall([size = report_size, in] sgx_report_t* p_report, + size_t report_size, + [size = quote_size, out] uint8_t *p_quote, + uint32_t quote_size); + + quote3_error_t sgx_tls_get_supplemental_data_size_ocall([out] uint32_t *p_supplemental_data_size); + + quote3_error_t sgx_tls_verify_quote_ocall( + [size = quote_size, in] const uint8_t *p_quote, + uint32_t quote_size, + time_t expiration_check_date, + [out] sgx_ql_qv_result_t *p_quote_verification_result, + [size = qve_report_info_size, in, out] sgx_ql_qe_report_info_t *p_qve_report_info, + size_t qve_report_info_size, + [size = supplemental_data_size, out] uint8_t *p_supplemental_data, + uint32_t supplemental_data_size); + + }; +}; diff --git a/rust-sgx-sdk/version b/rust-sgx-sdk/version index 4567a1e581..73a1e6b038 100644 --- a/rust-sgx-sdk/version +++ b/rust-sgx-sdk/version @@ -1 +1 @@ -565960cd7b4b36d1188459d75652619971c43f7e +08264d6bff679d6047e5e9bc36058b4475c58ed4 diff --git a/service/Cargo.toml b/service/Cargo.toml index 218a266150..33d22fa11c 100644 --- a/service/Cargo.toml +++ b/service/Cargo.toml @@ -34,8 +34,8 @@ multihash = "0.8" cid = "<0.3.1" sha2 = { version = "0.7", default-features = false } -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -primitive-types = { version = "0.10.1", default-features = false, features = ["codec"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } +primitive-types = { version = "0.11.1", default-features = false, features = ["codec"] } sgx_urts = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git" } @@ -49,6 +49,7 @@ itp-node-api-extensions = { path = "../core-primitives/node-api-extensions" } itp-enclave-api = { path = "../core-primitives/enclave-api" } itp-enclave-metrics = { path = "../core-primitives/enclave-metrics" } itp-settings = { path = "../core-primitives/settings" } +itp-stf-state-handler = { path = "../core-primitives/stf-state-handler" } itp-test = { path = "../core-primitives/test" } itp-types = { path = "../core-primitives/types" } its-consensus-slots = { path = "../sidechain/consensus/slots" } @@ -57,18 +58,18 @@ its-primitives = { path = "../sidechain/primitives"} its-storage = { path = "../sidechain/storage" } # scs / integritee -substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "master" } -my-node-runtime = { package = "ajuna-runtime", git = "https://github.com/ajuna-network/ajuna-node.git", branch = "update-substrate-5" } +substrate-api-client = { git = "https://github.com/scs/substrate-api-client", branch = "polkadot-v0.9.19" } +my-node-runtime = { package = "ajuna-solo-runtime", git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } teerex-primitives = { git = "https://github.com/integritee-network/pallets.git", branch = "master" } # Substrate dependencies -sp-runtime = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } 
-pallet-balances = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } -frame-system = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-finality-grandpa = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } -frame-support = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-runtime = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +pallet-balances = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-keyring = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +frame-system = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-finality-grandpa = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +frame-support = { version = "4.0.0-dev", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } [features] diff --git a/service/src/cli.yml b/service/src/cli.yml index 5d751ff72f..dc8efb61c8 100644 --- a/service/src/cli.yml +++ b/service/src/cli.yml @@ -65,7 +65,7 @@ args: required: false - enable-metrics: long: enable-metrics - help: Enable the metrics http server to serve metrics + help: Enable the metrics HTTP server to serve metrics - metrics-port: short: i long: metrics-port @@ -73,6 +73,12 @@ args: takes_value: true default_value: "8787" required: false + - untrusted-http-port: + short: h + long: untrusted-http-port + help: Set the port for the untrusted HTTP server + takes_value: true + required: false subcommands: - run: diff --git a/service/src/config.rs b/service/src/config.rs index 81a9705071..7eb00aaa66 100644 --- a/service/src/config.rs +++ b/service/src/config.rs @@ -7,6 +7,7 @@ static DEFAULT_TRUSTED_PORT: &str = "2000"; static DEFAULT_UNTRUSTED_PORT: &str = "2001"; static DEFAULT_MU_RA_PORT: &str = "3443"; static DEFAULT_METRICS_PORT: &str = "8787"; +static DEFAULT_UNTRUSTED_HTTP_PORT: &str = "4545"; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Config { @@ -29,6 +30,8 @@ pub struct Config { pub enable_metrics_server: bool, /// Port for the metrics server pub metrics_server_port: String, + /// Port for the untrusted HTTP server (e.g. 
for `is_initialized`) + pub untrusted_http_port: String, } #[allow(clippy::too_many_arguments)] @@ -45,6 +48,7 @@ impl Config { mu_ra_port: String, enable_metrics_server: bool, metrics_server_port: String, + untrusted_http_port: String, ) -> Self { Self { node_ip, @@ -58,6 +62,7 @@ impl Config { mu_ra_port, enable_metrics_server, metrics_server_port, + untrusted_http_port, } } @@ -105,6 +110,10 @@ impl Config { pub fn try_parse_metrics_server_port(&self) -> Option { self.metrics_server_port.parse::().ok() } + + pub fn try_parse_untrusted_http_server_port(&self) -> Option { + self.untrusted_http_port.parse::().ok() + } } impl From<&ArgMatches<'_>> for Config { @@ -114,6 +123,8 @@ impl From<&ArgMatches<'_>> for Config { let mu_ra_port = m.value_of("mu-ra-port").unwrap_or(DEFAULT_MU_RA_PORT); let is_metrics_server_enabled = m.is_present("enable-metrics"); let metrics_server_port = m.value_of("metrics-port").unwrap_or(DEFAULT_METRICS_PORT); + let untrusted_http_port = + m.value_of("untrusted-http-port").unwrap_or(DEFAULT_UNTRUSTED_HTTP_PORT); Self::new( m.value_of("node-server").unwrap_or(DEFAULT_NODE_SERVER).into(), @@ -130,6 +141,7 @@ impl From<&ArgMatches<'_>> for Config { mu_ra_port.to_string(), is_metrics_server_enabled, metrics_server_port.to_string(), + untrusted_http_port.to_string(), ) } } @@ -172,6 +184,8 @@ mod test { assert!(config.trusted_external_worker_address.is_none()); assert!(config.untrusted_external_worker_address.is_none()); assert!(config.mu_ra_external_address.is_none()); + assert!(!config.enable_metrics_server); + assert_eq!(config.untrusted_http_port, DEFAULT_UNTRUSTED_HTTP_PORT); } #[test] @@ -195,6 +209,7 @@ mod test { let untrusted_port = "9119"; let mu_ra_ext_addr = "1.1.3.1:1000"; let mu_ra_port = "99"; + let untrusted_http_port = "4321"; let mut args = ArgMatches::default(); args.args = HashMap::from([ @@ -207,6 +222,7 @@ mod test { ("mu-ra-port", Default::default()), ("untrusted-worker-port", Default::default()), ("trusted-worker-port", Default::default()), + ("untrusted-http-port", Default::default()), ]); // Workaround because MatchedArg is private. args.args.get_mut("node-server").unwrap().vals = vec![node_ip.into()]; @@ -218,6 +234,7 @@ mod test { args.args.get_mut("mu-ra-port").unwrap().vals = vec![mu_ra_port.into()]; args.args.get_mut("untrusted-worker-port").unwrap().vals = vec![untrusted_port.into()]; args.args.get_mut("trusted-worker-port").unwrap().vals = vec![trusted_port.into()]; + args.args.get_mut("untrusted-http-port").unwrap().vals = vec![untrusted_http_port.into()]; let config = Config::from(&args); @@ -229,6 +246,7 @@ mod test { assert_eq!(config.trusted_external_worker_address, Some(trusted_ext_addr.to_string())); assert_eq!(config.untrusted_external_worker_address, Some(untrusted_ext_addr.to_string())); assert_eq!(config.mu_ra_external_address, Some(mu_ra_ext_addr.to_string())); + assert_eq!(config.untrusted_http_port, untrusted_http_port.to_string()); } #[test] diff --git a/service/src/error.rs b/service/src/error.rs index f5543e7bd6..54fa136c9d 100644 --- a/service/src/error.rs +++ b/service/src/error.rs @@ -1,4 +1,21 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + use codec::Error as CodecError; +use itp_types::ShardIdentifier; use substrate_api_client::ApiClientError; pub type ServiceResult = Result; @@ -23,8 +40,8 @@ pub enum Error { FromUtf8(#[from] std::string::FromUtf8Error), #[error("Application setup error!")] ApplicationSetup, - #[error("Retrieved empty value")] - EmptyValue, + #[error("No worker for shard {0} found on parentchain")] + NoWorkerForShardFound(ShardIdentifier), #[error("Insufficient buffer size. Actual: {0}, required: {1}")] InsufficientBufferSize(usize, usize), #[error("Custom Error: {0}")] diff --git a/service/src/globals/mod.rs b/service/src/globals/mod.rs index 8d10504103..ee250661c5 100644 --- a/service/src/globals/mod.rs +++ b/service/src/globals/mod.rs @@ -17,4 +17,3 @@ */ pub mod tokio_handle; -pub mod worker; diff --git a/service/src/globals/worker.rs b/service/src/globals/worker.rs deleted file mode 100644 index 2acc93f525..0000000000 --- a/service/src/globals/worker.rs +++ /dev/null @@ -1,70 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - Copyright (C) 2017-2019 Baidu, Inc. All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -*/ - -use crate::{config::Config, worker::Worker as WorkerGen}; -use itp_enclave_api::Enclave; -use lazy_static::lazy_static; -use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard}; -use sp_core::sr25519; -use substrate_api_client::{rpc::WsRpcClient, Api}; - -pub type Worker = WorkerGen, Enclave>; - -lazy_static! { - static ref WORKER: RwLock> = RwLock::new(None); -} - -/// Trait for accessing a worker instance. -pub trait GetWorker { - fn get_worker<'a>(&self) -> RwLockReadGuard<'a, Option>; -} - -/// Trait for accessing a muteable worker instance. -pub trait GetMutWorker { - fn get_mut_worker<'a>(&self) -> RwLockWriteGuard<'a, Option>; -} - -pub struct GlobalWorker; - -/// These are the static (global) accessors. -/// Reduce their usage where possible and use an instance of WorkerAccessorImpl or the trait. 
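The deleted `service/src/globals/worker.rs` below kept a single `Worker` in a `lazy_static` `RwLock<Option<..>>` and handed out lock guards via `GetWorker`/`GetMutWorker`. The replacement, wired up in `service/src/main.rs` further down in this patch, constructs the worker once and shares it as an `Arc`, keeping interior mutability only for the peer list. A rough sketch of that shape, using empty placeholder types (`Config`, `Enclave`, `NodeApiFactory` here are stand-ins, not the real crates):

use std::sync::{Arc, RwLock};

// Empty placeholder types standing in for the real config, enclave and node-api factory.
pub struct Config;
pub struct Enclave;
pub struct NodeApiFactory;

pub struct Worker {
    _config: Config,
    _enclave: Arc<Enclave>,
    _node_api_factory: Arc<NodeApiFactory>,
    // Only the mutable part is behind a lock, so `&self` methods can update it.
    peers: RwLock<Vec<String>>,
}

impl Worker {
    pub fn new(
        config: Config,
        enclave: Arc<Enclave>,
        node_api_factory: Arc<NodeApiFactory>,
        peers: Vec<String>,
    ) -> Self {
        Self {
            _config: config,
            _enclave: enclave,
            _node_api_factory: node_api_factory,
            peers: RwLock::new(peers),
        }
    }

    pub fn set_peers(&self, peers: Vec<String>) {
        *self.peers.write().expect("peers lock poisoned") = peers;
    }

    pub fn peers(&self) -> Vec<String> {
        self.peers.read().expect("peers lock poisoned").clone()
    }
}

fn main() {
    // Build the worker once, then share the same instance wherever it is needed.
    let worker =
        Arc::new(Worker::new(Config, Arc::new(Enclave), Arc::new(NodeApiFactory), Vec::new()));
    let gossiper_worker = Arc::clone(&worker);

    worker.set_peers(vec!["ws://127.0.0.1:2001".to_string()]);
    assert_eq!(gossiper_worker.peers(), vec!["ws://127.0.0.1:2001".to_string()]);
}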
-impl GlobalWorker { - pub fn reset_worker(worker: Worker) { - *WORKER.write() = Some(worker); - } - - fn read_worker<'a>() -> RwLockReadGuard<'a, Option> { - WORKER.read() - } - - fn write_worker<'a>() -> RwLockWriteGuard<'a, Option> { - WORKER.write() - } -} - -impl GetWorker for GlobalWorker { - fn get_worker<'a>(&self) -> RwLockReadGuard<'a, Option> { - GlobalWorker::read_worker() - } -} - -impl GetMutWorker for GlobalWorker { - fn get_mut_worker<'a>(&self) -> RwLockWriteGuard<'a, Option> { - GlobalWorker::write_worker() - } -} diff --git a/service/src/initialized_service.rs b/service/src/initialized_service.rs new file mode 100644 index 0000000000..f7cd808d12 --- /dev/null +++ b/service/src/initialized_service.rs @@ -0,0 +1,54 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +//! Service to determine if the integritee services is initialized and registered on the node, +//! hosted on a http server. + +use crate::error::ServiceResult; +use lazy_static::lazy_static; +use log::*; +use parking_lot::RwLock; +use std::net::SocketAddr; +use warp::Filter; + +lazy_static! { + static ref INITIALIZED_HANDLE: RwLock = RwLock::new(false); +} + +pub async fn start_is_initialized_server(port: u16) -> ServiceResult<()> { + let is_initialized_route = warp::path!("is_initialized").and_then(|| async move { + if *INITIALIZED_HANDLE.read() { + Ok("I am initialized.") + } else { + Err(warp::reject::not_found()) + } + }); + + let socket_addr: SocketAddr = ([0, 0, 0, 0], port).into(); + + info!("Running initialized server on: {:?}", socket_addr); + warp::serve(is_initialized_route).run(socket_addr).await; + + info!("Initialized server shut down"); + Ok(()) +} + +/// Set initialized handler value to true. 
+pub fn set_initialized() { + let mut initialized_lock = INITIALIZED_HANDLE.write(); + *initialized_lock = true; +} diff --git a/service/src/main.rs b/service/src/main.rs index e56c5967db..45cacb5d52 100644 --- a/service/src/main.rs +++ b/service/src/main.rs @@ -20,10 +20,8 @@ use crate::{ account_funding::{setup_account_funding, EnclaveAccountInfoProvider}, error::Error, - globals::{ - tokio_handle::{GetTokioHandle, GlobalTokioHandle}, - worker::{GlobalWorker, Worker}, - }, + globals::tokio_handle::{GetTokioHandle, GlobalTokioHandle}, + initialized_service::{set_initialized, start_is_initialized_server}, ocall_bridge::{ bridge_api::Bridge as OCallBridge, component_factory::OCallBridgeComponentFactory, }, @@ -31,6 +29,7 @@ use crate::{ prometheus_metrics::{start_metrics_server, EnclaveMetricsReceiver, MetricsHandler}, sync_block_gossiper::SyncBlockGossiper, utils::{check_files, extract_shard}, + worker::Worker, worker_peers_updater::WorkerPeersUpdater, }; use base58::ToBase58; @@ -48,6 +47,7 @@ use itp_enclave_api::{ remote_attestation::{RemoteAttestation, TlsRemoteAttestation}, sidechain::Sidechain, teerex_api::TeerexApi, + Enclave, }; use itp_node_api_extensions::{ node_api_factory::{CreateNodeApi, NodeApiFactory}, @@ -55,11 +55,10 @@ use itp_node_api_extensions::{ }; use itp_settings::{ files::{ - ENCRYPTED_STATE_FILE, SHARDS_PATH, SHIELDING_KEY_FILE, SIDECHAIN_PURGE_INTERVAL, - SIDECHAIN_PURGE_LIMIT, SIDECHAIN_STORAGE_PATH, SIGNING_KEY_FILE, + SHIELDING_KEY_FILE, SIDECHAIN_PURGE_INTERVAL, SIDECHAIN_PURGE_LIMIT, + SIDECHAIN_STORAGE_PATH, SIGNING_KEY_FILE, }, sidechain::SLOT_DURATION, - worker::{EXISTENTIAL_DEPOSIT_FACTOR_FOR_INIT_FUNDS, REGISTERING_FEE_FACTOR_FOR_INIT_FUNDS}, }; use its_consensus_slots::start_slot_worker; use its_peer_fetch::{ @@ -74,14 +73,13 @@ use my_node_runtime::{Event, Hash, Header}; use sgx_types::*; use sp_core::{ crypto::{AccountId32, Ss58Codec}, - sr25519, Pair, + sr25519, }; use sp_finality_grandpa::VersionedAuthorityList; use sp_keyring::AccountKeyring; use std::{ fs::{self, File}, - io::{stdin, Write}, - path::{Path, PathBuf}, + path::PathBuf, str, sync::{ mpsc::{channel, Sender}, @@ -91,7 +89,7 @@ use std::{ time::{Duration, Instant}, }; use substrate_api_client::{ - rpc::WsRpcClient, utils::FromHexString, Api, GenericAddress, Header as HeaderTrait, XtStatus, + rpc::WsRpcClient, utils::FromHexString, Api, Header as HeaderTrait, XtStatus, }; use teerex_primitives::ShardIdentifier; @@ -100,6 +98,7 @@ mod config; mod enclave; mod error; mod globals; +mod initialized_service; mod ocall_bridge; mod parentchain_block_syncer; mod prometheus_metrics; @@ -113,6 +112,8 @@ mod worker_peers_updater; /// how many blocks will be synced before storing the chain db to disk const VERSION: &str = env!("CARGO_PKG_VERSION"); +pub type EnclaveWorker = Worker; + fn main() { // Setup logging env_logger::init(); @@ -132,11 +133,7 @@ fn main() { info!("*** Starting service in SGX debug mode"); // build the entire dependency tree - let worker = Arc::new(GlobalWorker {}); let tokio_handle = Arc::new(GlobalTokioHandle {}); - let sync_block_gossiper = - Arc::new(SyncBlockGossiper::new(tokio_handle.clone(), worker.clone())); - let peer_updater = Arc::new(WorkerPeersUpdater::new(worker)); let sidechain_blockstorage = Arc::new( SidechainStorageLock::::new(PathBuf::from(&SIDECHAIN_STORAGE_PATH)) .unwrap(), @@ -144,6 +141,15 @@ fn main() { let node_api_factory = Arc::new(NodeApiFactory::new(config.node_url(), AccountKeyring::Alice.pair())); let enclave = 
Arc::new(enclave_init(&config).unwrap()); + let worker = Arc::new(EnclaveWorker::new( + config.clone(), + enclave.clone(), + node_api_factory.clone(), + Vec::new(), + )); + let sync_block_gossiper = + Arc::new(SyncBlockGossiper::new(tokio_handle.clone(), worker.clone())); + let peer_updater = Arc::new(WorkerPeersUpdater::new(worker)); let untrusted_peer_fetcher = UntrustedPeerFetcher::new(node_api_factory.clone()); let peer_sidechain_block_fetcher = Arc::new(BlockFetcher::::new(untrusted_peer_fetcher)); @@ -171,13 +177,6 @@ fn main() { let node_api = node_api_factory.create_api().expect("Failed to create parentchain node API"); - GlobalWorker::reset_worker(Worker::new( - config.clone(), - node_api.clone(), - enclave.clone(), - Vec::new(), - )); - start_worker( config, &shard, @@ -229,7 +228,14 @@ fn main() { println!("{}", enclave.get_mrenclave().unwrap().encode().to_base58()); } else if let Some(_matches) = matches.subcommand_matches("init-shard") { let shard = extract_shard(_matches, enclave.as_ref()); - init_shard(&shard); + match enclave.init_shard(shard.encode()) { + Err(e) => { + println!("Failed to initialize shard {:?}: {:?}", shard, e); + }, + Ok(_) => { + println!("Successfully initialized shard {:?}", shard); + }, + } } else if let Some(_matches) = matches.subcommand_matches("test") { if _matches.is_present("provisioning-server") { println!("*** Running Enclave MU-RA TLS server\n"); @@ -308,26 +314,22 @@ fn start_worker( let tokio_handle = tokio_handle_getter.get_handle(); - // ------------------------------------------------------------------------ - // Start trusted worker rpc server. - let direct_invocation_server_addr = config.trusted_worker_url_internal(); - let enclave_for_direct_invocation = enclave.clone(); - thread::spawn(move || { - println!( - "[+] Trusted RPC direction invocation server listening on {}", - direct_invocation_server_addr - ); - enclave_for_direct_invocation - .init_direct_invocation_server(direct_invocation_server_addr) - .unwrap(); - println!("[+] RPC direction invocation server shut down"); - }); - // ------------------------------------------------------------------------ // Get the public key of our TEE. let genesis_hash = node_api.genesis_hash.as_bytes().to_vec(); let tee_accountid = enclave_account(enclave.as_ref()); + // ------------------------------------------------------------------------ + // Start `is_initialized` server. + let untrusted_http_server_port = config + .try_parse_untrusted_http_server_port() + .expect("untrusted http server port to be a valid port number"); + tokio_handle.spawn(async move { + if let Err(e) = start_is_initialized_server(untrusted_http_server_port).await { + error!("Unexpected error in `is_initialized` server: {:?}", e); + } + }); + // ------------------------------------------------------------------------ // Start prometheus metrics server. 
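Right above, the worker now spawns the `is_initialized` server on the untrusted HTTP port (default 4545, see the config and CLI changes earlier in this patch). A minimal readiness probe against that endpoint, using only the standard library and assuming the default port; in practice a plain `curl http://127.0.0.1:4545/is_initialized` or an orchestrator health check does the same:

use std::io::{Read, Write};
use std::net::TcpStream;

/// Probe the untrusted `is_initialized` HTTP endpoint.
/// Returns true only once the worker reports that it is initialized (HTTP 200).
fn is_worker_initialized(addr: &str) -> std::io::Result<bool> {
    let mut stream = TcpStream::connect(addr)?;
    // Minimal HTTP/1.1 request; the warp route answers 200 once `set_initialized()`
    // was called, and rejects with 404 before that.
    stream.write_all(b"GET /is_initialized HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n")?;
    let mut response = String::new();
    stream.read_to_string(&mut response)?;
    Ok(response.starts_with("HTTP/1.1 200"))
}

fn main() {
    // 4545 is the default untrusted HTTP port introduced by this patch.
    match is_worker_initialized("127.0.0.1:4545") {
        Ok(true) => println!("worker is initialized and registered"),
        Ok(false) => println!("worker is up but not initialized yet"),
        Err(e) => println!("worker not reachable: {}", e),
    }
}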
if config.enable_metrics_server { @@ -344,6 +346,39 @@ fn start_worker( }); } + // ------------------------------------------------------------------------ + // Start trusted worker rpc server + let direct_invocation_server_addr = config.trusted_worker_url_internal(); + let enclave_for_direct_invocation = enclave.clone(); + thread::spawn(move || { + println!( + "[+] Trusted RPC direct invocation server listening on {}", + direct_invocation_server_addr + ); + enclave_for_direct_invocation + .init_direct_invocation_server(direct_invocation_server_addr) + .unwrap(); + println!("[+] RPC direct invocation server shut down"); + }); + + // ------------------------------------------------------------------------ + // Start untrusted worker rpc server. + // FIXME: this should be removed - this server should only handle untrusted things. + // i.e move sidechain block importing to trusted worker. + let enclave_for_block_gossip_rpc_server = enclave.clone(); + let untrusted_url = config.untrusted_worker_url(); + println!("[+] Untrusted RPC server listening on {}", &untrusted_url); + let sidechain_storage_for_rpc = sidechain_storage.clone(); + let _untrusted_rpc_join_handle = tokio_handle.spawn(async move { + itc_rpc_server::run_server( + &untrusted_url, + enclave_for_block_gossip_rpc_server, + sidechain_storage_for_rpc, + ) + .await + .unwrap(); + }); + // ------------------------------------------------------------------------ // Perform a remote attestation and get an unchecked extrinsic back. let nonce = node_api.get_nonce_of(&tee_accountid).unwrap(); @@ -389,7 +424,7 @@ fn start_worker( } let last_synced_header = init_light_client(&node_api, enclave.clone()).unwrap(); - println!("*** [+] Finished syncing light client, syncing parent chain..."); + println!("*** [+] Finished syncing light client, syncing parentchain..."); // Syncing all parentchain blocks, this might take a while.. let parentchain_block_syncer = @@ -408,24 +443,8 @@ fn start_worker( } // ------------------------------------------------------------------------ - // Start untrusted worker rpc server. - // FIXME: this should be removed - this server should only handle untrusted things. - // i.e move sidechain block importing to trusted worker. - let enclave_for_block_gossip_rpc_server = enclave.clone(); - let untrusted_url = config.untrusted_worker_url(); - println!("[+] Untrusted RPC server listening on {}", &untrusted_url); - let sidechain_storage_for_rpc = sidechain_storage.clone(); - let _untrusted_rpc_join_handle = tokio_handle.spawn(async move { - itc_rpc_server::run_server( - &untrusted_url, - enclave_for_block_gossip_rpc_server, - sidechain_storage_for_rpc, - ) - .await - .unwrap(); - }); - - thread::sleep(Duration::from_secs(3)); + // Initialize sidechain components (has to be AFTER init_light_client() + enclave.init_enclave_sidechain_components().unwrap(); // ------------------------------------------------------------------------ // Start interval sidechain block production (execution of trusted calls, sidechain block production). @@ -496,6 +515,9 @@ fn start_worker( }) .unwrap(); + // Set that the service is initialized. + set_initialized(); + println!("[+] Subscribed to events. 
waiting..."); let timeout = Duration::from_millis(10); loop { @@ -684,25 +706,6 @@ fn execute_trusted_calls(enclave_api: &E) { }; } -fn init_shard(shard: &ShardIdentifier) { - let path = format!("{}/{}", SHARDS_PATH, shard.encode().to_base58()); - println!("initializing shard at {}", path); - fs::create_dir_all(path.clone()).expect("could not create dir"); - - let path = format!("{}/{}", path, ENCRYPTED_STATE_FILE); - if Path::new(&path).exists() { - println!("shard state exists. Overwrite? [y/N]"); - let buffer = &mut String::new(); - stdin().read_line(buffer).unwrap(); - match buffer.trim() { - "y" | "Y" => (), - _ => return, - } - } - let mut file = fs::File::create(path).unwrap(); - file.write_all(b"").unwrap(); -} - /// Get the public signing key of the TEE. fn enclave_account(enclave_api: &E) -> AccountId32 { let tee_public = enclave_api.get_ecc_signing_pubkey().unwrap(); @@ -710,120 +713,6 @@ fn enclave_account(enclave_api: &E) -> AccountId32 { AccountId32::from(*tee_public.as_array_ref()) } -fn account_funding( - api: &mut Api, - accountid: &AccountId32, - extrinsic_prefix: String, - dev: bool, -) -> Result<(), Error> { - // Account funds - if dev { - // Development mode, the faucet will ensure that the enclave has enough funds - ensure_account_has_funds(api, accountid)?; - } else { - // Production mode, there is no faucet. - let registration_fees = enclave_registration_fees(api, &extrinsic_prefix)?; - info!("Registration fees = {:?}", registration_fees); - let free_balance = api.get_free_balance(accountid)?; - info!("TEE's free balance = {:?}", free_balance); - - let min_required_funds = - registration_fees.saturating_mul(REGISTERING_FEE_FACTOR_FOR_INIT_FUNDS); - let missing_funds = min_required_funds.saturating_sub(free_balance); - - if missing_funds > 0 { - // If there are not enough funds, then the user can send the missing TEER to the enclave address and start again. 
- println!( - "Enclave account: {:}, missing funds {}", - accountid.to_ss58check(), - missing_funds - ); - return Err(Error::Custom( - "Enclave does not have enough funds on the parentchain to register.".into(), - )) - } - } - Ok(()) -} -// Alice plays the faucet and sends some funds to the account if balance is low -fn ensure_account_has_funds( - api: &mut Api, - accountid: &AccountId32, -) -> Result<(), Error> { - // check account balance - let free_balance = api.get_free_balance(accountid)?; - info!("TEE's free balance = {:?}", free_balance); - - let existential_deposit = api.get_existential_deposit()?; - info!("Existential deposit is = {:?}", existential_deposit); - - let min_required_funds = - existential_deposit.saturating_mul(EXISTENTIAL_DEPOSIT_FACTOR_FOR_INIT_FUNDS); - let missing_funds = min_required_funds.saturating_sub(free_balance); - - if missing_funds > 0 { - bootstrap_funds_from_alice(api, accountid, missing_funds)?; - } - Ok(()) -} - -fn enclave_registration_fees( - api: &mut Api, - xthex_prefixed: &str, -) -> Result { - let reg_fee_details = api.get_fee_details(xthex_prefixed, None)?; - match reg_fee_details { - Some(details) => match details.inclusion_fee { - Some(fee) => Ok(fee.inclusion_fee()), - None => Err(Error::Custom( - "Inclusion fee for the registration of the enclave is None!".into(), - )), - }, - None => - Err(Error::Custom("Fee Details for the registration of the enclave is None !".into())), - } -} - -// Alice sends some funds to the account -fn bootstrap_funds_from_alice( - api: &mut Api, - accountid: &AccountId32, - funding_amount: u128, -) -> Result<(), Error> { - let alice = AccountKeyring::Alice.pair(); - info!("encoding Alice's public = {:?}", alice.public().0.encode()); - let alice_acc = AccountId32::from(*alice.public().as_array_ref()); - info!("encoding Alice's AccountId = {:?}", alice_acc.encode()); - - let alice_free = api.get_free_balance(&alice_acc)?; - info!(" Alice's free balance = {:?}", alice_free); - let nonce = api.get_nonce_of(&alice_acc)?; - info!(" Alice's Account Nonce is {}", nonce); - - if funding_amount > alice_free { - println!( - "funding amount is to high: please change EXISTENTIAL_DEPOSIT_FACTOR_FOR_INIT_FUNDS ({:?})", - funding_amount - ); - return Err(Error::ApplicationSetup) - } - - let signer_orig = api.signer.clone(); - api.signer = Some(alice); - - println!("[+] bootstrap funding Enclave from Alice's funds"); - let xt = api.balance_transfer(GenericAddress::Id(accountid.clone()), funding_amount); - let xt_hash = api.send_extrinsic(xt.hex_encode(), XtStatus::InBlock)?; - info!("[<] Extrinsic got included in a block. Hash: {:?}\n", xt_hash); - - // Verify funds have arrived. - let free_balance = api.get_free_balance(accountid); - info!("TEE's NEW free balance = {:?}", free_balance); - - api.signer = signer_orig; - Ok(()) -} - /// Ensure we're synced up until the parentchain block where we have registered ourselves. 
fn import_parentchain_blocks_until_self_registry< E: EnclaveBase + TeerexApi + Sidechain, diff --git a/service/src/ocall_bridge/sidechain_ocall.rs b/service/src/ocall_bridge/sidechain_ocall.rs index 3595fb8786..954982a02b 100644 --- a/service/src/ocall_bridge/sidechain_ocall.rs +++ b/service/src/ocall_bridge/sidechain_ocall.rs @@ -25,7 +25,7 @@ use crate::{ use codec::{Decode, Encode}; use itp_types::{BlockHash, ShardIdentifier}; use its_peer_fetch::FetchBlocksFromPeer; -use its_primitives::types::SignedBlock as SignedSidechainBlock; +use its_primitives::{traits::Block, types::SignedBlock as SignedSidechainBlock}; use its_storage::BlockStorage; use log::*; use std::sync::Arc; @@ -86,13 +86,17 @@ where if !signed_blocks.is_empty() { info!( "Enclave produced sidechain blocks: {:?}", - signed_blocks.iter().map(|b| b.block.block_number).collect::>() + signed_blocks + .iter() + .map(|b| b.block.header().block_number) + .collect::>() ); } else { debug!("Enclave did not produce sidechain blocks"); } // FIXME: When & where should peers be updated? + debug!("Updating peers.."); if let Err(e) = self.peer_updater.update_peers() { error!("Error updating peers: {:?}", e); // Fixme: returning an error here results in a `HeaderAncestryMismatch` error. @@ -101,6 +105,7 @@ where info!("Successfully updated peers"); } + debug!("Gossiping sidechain blocks.."); if let Err(e) = self.block_gossiper.gossip_blocks(signed_blocks) { error!("Error gossiping blocks: {:?}", e); // Fixme: returning an error here results in a `HeaderAncestryMismatch` error. diff --git a/service/src/sync_block_gossiper.rs b/service/src/sync_block_gossiper.rs index d1fde63f38..29e230ae95 100644 --- a/service/src/sync_block_gossiper.rs +++ b/service/src/sync_block_gossiper.rs @@ -16,20 +16,18 @@ */ +#[cfg(test)] +use mockall::predicate::*; +#[cfg(test)] +use mockall::*; + use crate::{ - error::Error, - globals::{tokio_handle::GetTokioHandle, worker::GetWorker}, + globals::tokio_handle::GetTokioHandle, worker::{AsyncBlockGossiper, WorkerResult}, }; use its_primitives::types::SignedBlock as SignedSidechainBlock; -use log::*; use std::sync::Arc; -#[cfg(test)] -use mockall::predicate::*; -#[cfg(test)] -use mockall::*; - /// Allows to gossip blocks, does it in a synchronous (i.e. blocking) manner #[cfg_attr(test, automock)] pub trait GossipBlocks { @@ -50,18 +48,10 @@ impl SyncBlockGossiper { impl GossipBlocks for SyncBlockGossiper where T: GetTokioHandle, - W: GetWorker, + W: AsyncBlockGossiper, { fn gossip_blocks(&self, blocks: Vec) -> WorkerResult<()> { - match self.worker.get_worker().as_ref() { - Some(w) => { - let handle = self.tokio_handle.get_handle(); - handle.block_on(w.gossip_blocks(blocks)) - }, - None => { - error!("Failed to get worker instance"); - Err(Error::ApplicationSetup) - }, - } + let handle = self.tokio_handle.get_handle(); + handle.block_on(self.worker.gossip_blocks(blocks)) } } diff --git a/service/src/sync_state.rs b/service/src/sync_state.rs index 870a6e0a0d..33ce7bcea5 100644 --- a/service/src/sync_state.rs +++ b/service/src/sync_state.rs @@ -37,7 +37,7 @@ pub(crate) fn sync_state( // FIXME: we now assume that keys are equal for all shards. 
let provider_url = executor::block_on(get_author_url_of_last_finalized_sidechain_block(node_api, shard)) - .unwrap(); + .expect("Author of last finalized sidechain block could not be found"); println!("Requesting state provisioning from worker at {}", &provider_url); enclave_request_state_provisioning( @@ -61,7 +61,9 @@ async fn get_author_url_of_last_finalized_sidechain_block Result { - let enclave = node_api.worker_for_shard(shard, None)?.ok_or(Error::EmptyValue)?; + let enclave = node_api + .worker_for_shard(shard, None)? + .ok_or_else(|| Error::NoWorkerForShardFound(*shard))?; let worker_api_direct = DirectWorkerApi::new(enclave.url); Ok(worker_api_direct.get_mu_ra_url()?) } diff --git a/service/src/tests/commons.rs b/service/src/tests/commons.rs index 0619677f97..9948a63f7f 100644 --- a/service/src/tests/commons.rs +++ b/service/src/tests/commons.rs @@ -55,5 +55,6 @@ pub fn local_worker_config( mu_ra_port, false, "8787".to_string(), + "4545".to_string(), ) } diff --git a/service/src/tests/ecalls.rs b/service/src/tests/ecalls.rs index 9bad446334..62f904f67d 100644 --- a/service/src/tests/ecalls.rs +++ b/service/src/tests/ecalls.rs @@ -15,9 +15,10 @@ */ -use crate::{init_shard, tests::commons::test_trusted_getter_signed}; +use crate::tests::commons::test_trusted_getter_signed; use codec::Encode; use itp_enclave_api::{enclave_base::EnclaveBase, EnclaveResult}; +use itp_stf_state_handler::file_io::purge_shard_dir; use log::*; use sp_core::hash::H256; use sp_keyring::AccountKeyring; @@ -26,12 +27,14 @@ pub fn get_state_works(enclave_api: &E) -> EnclaveResult<()> { let alice = AccountKeyring::Alice; let trusted_getter_signed = test_trusted_getter_signed(alice).encode(); let shard = H256::default(); - init_shard(&shard); + enclave_api.init_shard(shard.encode())?; let res = enclave_api.get_state(trusted_getter_signed, shard.encode())?; debug!("got state value: {:?}", hex::encode(res.clone())); //println!("get_state returned {:?}", res); assert!(!res.is_empty()); + purge_shard_dir(&shard); + Ok(()) } diff --git a/service/src/tests/mock.rs b/service/src/tests/mock.rs index 5f18fade11..a575faf85d 100644 --- a/service/src/tests/mock.rs +++ b/service/src/tests/mock.rs @@ -1,3 +1,20 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +*/ + use itp_node_api_extensions::{ApiResult, PalletTeerexApi}; use itp_types::{Enclave, ShardIdentifier, H256 as Hash}; diff --git a/service/src/tests/mocks/enclave_api_mock.rs b/service/src/tests/mocks/enclave_api_mock.rs index 3e6751a197..0436b16a3a 100644 --- a/service/src/tests/mocks/enclave_api_mock.rs +++ b/service/src/tests/mocks/enclave_api_mock.rs @@ -30,6 +30,10 @@ impl EnclaveBase for EnclaveBaseMock { Ok(()) } + fn init_enclave_sidechain_components(&self) -> EnclaveResult<()> { + Ok(()) + } + fn init_direct_invocation_server(&self, _rpc_server_addr: String) -> EnclaveResult<()> { unreachable!() } @@ -43,6 +47,10 @@ impl EnclaveBase for EnclaveBaseMock { Ok(genesis_header) } + fn init_shard(&self, _shard: Vec) -> EnclaveResult<()> { + unimplemented!() + } + fn trigger_parentchain_block_import(&self) -> EnclaveResult<()> { unimplemented!() } diff --git a/service/src/tests/mod.rs b/service/src/tests/mod.rs index c296fe6459..1f3490de5f 100644 --- a/service/src/tests/mod.rs +++ b/service/src/tests/mod.rs @@ -26,9 +26,6 @@ pub mod commons; pub mod ecalls; pub mod mock; -#[cfg(test)] -pub mod worker; - #[cfg(test)] pub mod mocks; diff --git a/service/src/tests/worker.rs b/service/src/tests/worker.rs deleted file mode 100644 index fde0241e01..0000000000 --- a/service/src/tests/worker.rs +++ /dev/null @@ -1,36 +0,0 @@ -use itp_node_api_extensions::PalletTeerexApi; -use lazy_static::lazy_static; -use parking_lot::RwLock; - -use crate::{ - config::Config, - tests::{ - commons::local_worker_config, - mock::{enclaves, TestNodeApi, W2_URL}, - }, - worker::Worker as WorkerGen, -}; -use std::sync::Arc; - -type TestWorker = WorkerGen; - -lazy_static! { - static ref WORKER: RwLock> = RwLock::new(None); -} - -#[test] -fn worker_rw_lock_works() { - { - let mut w = WORKER.write(); - *w = Some(TestWorker::new( - local_worker_config(W2_URL.into(), "10".to_string(), "20".to_string()), - TestNodeApi, - Arc::new(()), - Vec::new(), - )); - } - - let w = WORKER.read(); - // call some random function to see how the worker needs to be called. - assert_eq!(w.as_ref().unwrap().node_api().all_enclaves(None).unwrap(), enclaves()) -} diff --git a/service/src/worker.rs b/service/src/worker.rs index 90314f7baf..542ca2a1e2 100644 --- a/service/src/worker.rs +++ b/service/src/worker.rs @@ -23,7 +23,7 @@ use crate::{config::Config, error::Error}; use async_trait::async_trait; use itc_rpc_client::direct_client::{DirectApi, DirectClient as DirectWorkerApi}; -use itp_node_api_extensions::PalletTeerexApi; +use itp_node_api_extensions::{node_api_factory::CreateNodeApi, PalletTeerexApi}; use its_primitives::{ constants::RPC_METHOD_NAME_IMPORT_BLOCKS, types::SignedBlock as SignedSidechainBlock, }; @@ -32,32 +32,31 @@ use jsonrpsee::{ ws_client::WsClientBuilder, }; use log::*; -use std::sync::Arc; +use std::sync::{Arc, RwLock}; pub type WorkerResult = Result; pub type Url = String; -pub struct Worker { +pub struct Worker { _config: Config, - node_api: NodeApi, // todo: Depending on system design, all the api fields should be Arc // unused yet, but will be used when more methods are migrated to the worker _enclave_api: Arc, - peers: Vec, + node_api_factory: Arc, + peers: RwLock>, } -impl Worker { +impl Worker { pub fn new( - _config: Config, - node_api: NodeApi, - _enclave_api: Arc, + config: Config, + enclave_api: Arc, + node_api_factory: Arc, peers: Vec, ) -> Self { - Self { _config, node_api, _enclave_api, peers } - } - - // will soon be used. 
- #[allow(dead_code)] - pub fn node_api(&self) -> &NodeApi { - &self.node_api + Self { + _config: config, + _enclave_api: enclave_api, + node_api_factory, + peers: RwLock::new(peers), + } } } @@ -68,9 +67,9 @@ pub trait AsyncBlockGossiper { } #[async_trait] -impl AsyncBlockGossiper for Worker +impl AsyncBlockGossiper for Worker where - NodeApi: PalletTeerexApi + Send + Sync, + NodeApiFactory: CreateNodeApi + Send + Sync, Enclave: Send + Sync, { async fn gossip_blocks(&self, blocks: Vec) -> WorkerResult<()> { @@ -80,8 +79,11 @@ where } let blocks_json = vec![to_json_value(blocks)?]; + let peers = self.peers.read().map_err(|e| { + Error::Custom(format!("Encountered poisoned lock for peers: {:?}", e).into()) + })?; - for url in self.peers.iter().cloned() { + for url in peers.iter().cloned() { let blocks = blocks_json.clone(); tokio::spawn(async move { @@ -112,30 +114,48 @@ where /// Looks for new peers and updates them. pub trait UpdatePeers { fn search_peers(&self) -> WorkerResult>; - fn set_peers(&mut self, peers: Vec) -> WorkerResult<()>; - fn update_peers(&mut self) -> WorkerResult<()> { + + fn set_peers(&self, peers: Vec) -> WorkerResult<()>; + + fn update_peers(&self) -> WorkerResult<()> { let peers = self.search_peers()?; self.set_peers(peers) } } -impl UpdatePeers for Worker +impl UpdatePeers for Worker where - NodeApi: PalletTeerexApi + Send + Sync, + NodeApiFactory: CreateNodeApi + Send + Sync, { fn search_peers(&self) -> WorkerResult> { - let enclaves = self.node_api.all_enclaves(None)?; + let node_api = self + .node_api_factory + .create_api() + .map_err(|e| Error::Custom(format!("Failed to create NodeApi: {:?}", e).into()))?; + let enclaves = node_api.all_enclaves(None)?; let mut peer_urls = Vec::::new(); for enclave in enclaves { // FIXME: This is temporary only, as block gossiping should be moved to trusted ws server. 
+ let enclave_url = enclave.url.clone(); let worker_api_direct = DirectWorkerApi::new(enclave.url); - peer_urls.push(worker_api_direct.get_untrusted_worker_url()?); + let untrusted_worker_url = + worker_api_direct.get_untrusted_worker_url().map_err(|e| { + error!( + "Failed to get untrusted worker url (enclave: {}): {:?}", + enclave_url, e + ); + e + })?; + peer_urls.push(untrusted_worker_url); } Ok(peer_urls) } - fn set_peers(&mut self, peers: Vec) -> WorkerResult<()> { - self.peers = peers; + fn set_peers(&self, peers: Vec) -> WorkerResult<()> { + let mut peers_lock = self.peers.write().map_err(|e| { + Error::Custom(format!("Encountered poisoned lock for peers: {:?}", e).into()) + })?; + *peers_lock = peers; Ok(()) } } @@ -145,15 +165,17 @@ mod tests { use crate::{ tests::{ commons::local_worker_config, - mock::{TestNodeApi, W1_URL, W2_URL}, + mock::{W1_URL, W2_URL}, }, worker::{AsyncBlockGossiper, Worker}, }; use frame_support::assert_ok; + use itp_node_api_extensions::node_api_factory::NodeApiFactory; use its_primitives::types::SignedBlock as SignedSidechainBlock; use its_test::sidechain_block_builder::SidechainBlockBuilder; use jsonrpsee::{ws_server::WsServerBuilder, RpcModule}; use log::debug; + use sp_keyring::AccountKeyring; use std::{net::SocketAddr, sync::Arc}; use tokio::net::ToSocketAddrs; @@ -188,8 +210,11 @@ mod tests { let worker = Worker::new( local_worker_config(W1_URL.into(), untrusted_worker_port.clone(), "30".to_string()), - TestNodeApi, Arc::new(()), + Arc::new(NodeApiFactory::new( + "ws://invalid.url".to_string(), + AccountKeyring::Alice.pair(), + )), peers, ); diff --git a/service/src/worker_peers_updater.rs b/service/src/worker_peers_updater.rs index 05d2ea51a0..5b536ef667 100644 --- a/service/src/worker_peers_updater.rs +++ b/service/src/worker_peers_updater.rs @@ -16,49 +16,35 @@ */ -use crate::{ - error::Error, - globals::worker::GetMutWorker, - worker::{UpdatePeers, WorkerResult}, -}; -use log::*; -use std::sync::Arc; - #[cfg(test)] use mockall::predicate::*; #[cfg(test)] use mockall::*; +use crate::worker::{UpdatePeers, WorkerResult}; +use std::sync::Arc; + /// Updates the peers of the global worker. #[cfg_attr(test, automock)] pub trait UpdateWorkerPeers { fn update_peers(&self) -> WorkerResult<()>; } -pub struct WorkerPeersUpdater { - worker: Arc, +pub struct WorkerPeersUpdater { + worker: Arc, } -impl WorkerPeersUpdater { - pub fn new(worker: Arc) -> Self { +impl WorkerPeersUpdater { + pub fn new(worker: Arc) -> Self { WorkerPeersUpdater { worker } } } -// FIXME: We should write unit tests for this one here - but with the global worker struct, which is not yet made to be mocked, -// this would require a lot of changes. 
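With the global worker gone, `WorkerPeersUpdater` (below) becomes generic over anything implementing `UpdatePeers`, and `update_peers` turns into a provided trait method built from `search_peers` and `set_peers`, both taking `&self` thanks to the `RwLock`-guarded peer list. A compact, self-contained illustration of that shape; the `Url` alias, the error type and the stub worker are invented for the example:

use std::sync::{Arc, RwLock};

pub type Url = String;
pub type WorkerResult<T> = Result<T, String>;

pub trait UpdatePeers {
    fn search_peers(&self) -> WorkerResult<Vec<Url>>;

    fn set_peers(&self, peers: Vec<Url>) -> WorkerResult<()>;

    /// Provided method: look up the current peer set and store it.
    fn update_peers(&self) -> WorkerResult<()> {
        let peers = self.search_peers()?;
        self.set_peers(peers)
    }
}

/// Thin wrapper that only needs the trait, which keeps it easy to mock in tests.
pub struct WorkerPeersUpdater<W> {
    worker: Arc<W>,
}

impl<W: UpdatePeers> WorkerPeersUpdater<W> {
    pub fn new(worker: Arc<W>) -> Self {
        Self { worker }
    }

    pub fn update_peers(&self) -> WorkerResult<()> {
        self.worker.update_peers()
    }
}

/// Stub worker used only for this example.
struct StubWorker {
    peers: RwLock<Vec<Url>>,
}

impl UpdatePeers for StubWorker {
    fn search_peers(&self) -> WorkerResult<Vec<Url>> {
        Ok(vec!["ws://127.0.0.1:3001".to_string()])
    }

    fn set_peers(&self, peers: Vec<Url>) -> WorkerResult<()> {
        *self.peers.write().map_err(|e| e.to_string())? = peers;
        Ok(())
    }
}

fn main() {
    let worker = Arc::new(StubWorker { peers: RwLock::new(Vec::new()) });
    let updater = WorkerPeersUpdater::new(worker.clone());

    updater.update_peers().unwrap();
    assert_eq!(worker.peers.read().unwrap().len(), 1);
}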
-impl UpdateWorkerPeers for WorkerPeersUpdater +impl UpdateWorkerPeers for WorkerPeersUpdater where - Worker: GetMutWorker, + WorkerType: UpdatePeers, { fn update_peers(&self) -> WorkerResult<()> { - let maybe_worker = &mut *self.worker.get_mut_worker(); - match maybe_worker { - Some(w) => w.update_peers(), - None => { - error!("Failed to get worker instance"); - Err(Error::ApplicationSetup) - }, - } + self.worker.update_peers() } } diff --git a/sidechain/block-composer/Cargo.toml b/sidechain/block-composer/Cargo.toml index 15b99a5a9e..980b662a09 100644 --- a/sidechain/block-composer/Cargo.toml +++ b/sidechain/block-composer/Cargo.toml @@ -16,10 +16,10 @@ itp-settings = { path = "../../core-primitives/settings" } itp-sgx-crypto = { path = "../../core-primitives/sgx/crypto", default-features = false } itp-stf-executor = { path = "../../core-primitives/stf-executor", default-features = false } itp-time-utils = { path = "../../core-primitives/time-utils", default-features = false } +itp-top-pool-author = { path = "../../core-primitives/top-pool-author", default-features = false } itp-types = { path = "../../core-primitives/types", default-features = false } its-primitives = { path = "../primitives", default-features = false } its-state = { path = "../state", default-features = false } -its-top-pool-rpc-author = { path = "../top-pool-rpc-author", default-features = false } # integritee dependencies sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master" } @@ -31,10 +31,10 @@ thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linu thiserror = { version = "1.0", optional = true } # no-std compatible libraries -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [features] @@ -44,10 +44,10 @@ std = [ "itp-sgx-crypto/std", "itp-stf-executor/std", "itp-time-utils/std", + "itp-top-pool-author/std", "itp-types/std", "its-primitives/std", "its-state/std", - "its-top-pool-rpc-author/std", "log/std", "sgx-externalities/std", "thiserror", @@ -58,8 +58,8 @@ sgx = [ "itp-sgx-crypto/sgx", "itp-stf-executor/sgx", "itp-time-utils/sgx", + "itp-top-pool-author/sgx", "itp-types/sgx", "its-state/sgx", - "its-top-pool-rpc-author/sgx", "thiserror_sgx", ] diff --git a/sidechain/block-composer/src/block_composer.rs b/sidechain/block-composer/src/block_composer.rs index f4b47e05df..175f80eb1e 100644 --- a/sidechain/block-composer/src/block_composer.rs +++ b/sidechain/block-composer/src/block_composer.rs @@ -23,10 +23,10 @@ use itp_sgx_crypto::StateCrypto; use itp_time_utils::now_as_u64; use itp_types::{OpaqueCall, ShardIdentifier, H256}; use its_primitives::traits::{ - Block as SidechainBlockTrait, SignBlock, 
SignedBlock as SignedSidechainBlockTrait, + Block as SidechainBlockTrait, BlockData, Header as HeaderTrait, SignBlock, + SignedBlock as SignedSidechainBlockTrait, }; use its_state::{LastBlockExt, SidechainDB, SidechainState, SidechainSystemExt, StateHash}; -use its_top_pool_rpc_author::traits::{AuthorApi, OnBlockCreated, SendState}; use log::*; use sgx_externalities::SgxExternalitiesTrait; use sp_core::Pair; @@ -34,7 +34,7 @@ use sp_runtime::{ traits::{Block as ParentchainBlockTrait, Header}, MultiSignature, }; -use std::{format, marker::PhantomData, sync::Arc, vec::Vec}; +use std::{format, marker::PhantomData, vec::Vec}; /// Compose a sidechain block and corresponding confirmation extrinsic for the parentchain /// @@ -52,47 +52,46 @@ pub trait ComposeBlockAndConfirmation { +pub struct BlockComposer { signer: Signer, state_key: StateKey, - rpc_author: Arc, _phantom: PhantomData<(ParentchainBlock, SignedSidechainBlock)>, } -impl - BlockComposer +impl + BlockComposer where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: - SidechainBlockTrait, + SignedSidechainBlock::Block: SidechainBlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, SignedSidechainBlock::Signature: From, - RpcAuthor: AuthorApi - + OnBlockCreated - + SendState, Signer: Pair, Signer::Public: Encode, StateKey: StateCrypto, { - pub fn new(signer: Signer, state_key: StateKey, rpc_author: Arc) -> Self { - BlockComposer { signer, state_key, rpc_author, _phantom: Default::default() } + pub fn new(signer: Signer, state_key: StateKey) -> Self { + BlockComposer { signer, state_key, _phantom: Default::default() } } } -impl +type HeaderTypeOf = <::Block as SidechainBlockTrait>::HeaderType; +type BlockDataTypeOf = + <::Block as SidechainBlockTrait>::BlockDataType; + +impl ComposeBlockAndConfirmation - for BlockComposer + for BlockComposer where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: - SidechainBlockTrait, + SignedSidechainBlock::Block: SidechainBlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, SignedSidechainBlock::Signature: From, - RpcAuthor: AuthorApi - + OnBlockCreated - + SendState, Externalities: SgxExternalitiesTrait + SidechainState + SidechainSystemExt + StateHash + Encode, Signer: Pair, Signer::Public: Encode, @@ -114,7 +113,7 @@ where let state_hash_new = db.state_hash(); let (block_number, parent_hash) = match db.get_last_block() { - Some(block) => (block.block_number() + 1, block.hash()), + Some(block) => (block.header().block_number() + 1, block.hash()), None => { info!("Seems to be first sidechain block."); (1, Default::default()) @@ -134,23 +133,28 @@ where Error::Other(format!("Failed to encrypt state payload: {:?}", e).into()) })?; - let block = SignedSidechainBlock::Block::new( + let block_data = BlockDataTypeOf::::new( author_public, - block_number, - parent_hash, latest_parentchain_header.hash(), - shard, top_call_hashes, payload, now_as_u64(), ); + let header = HeaderTypeOf::::new( + block_number, + parent_hash, + shard, + block_data.hash(), + ); + + let block = SignedSidechainBlock::Block::new(header, block_data); + let block_hash = block.hash(); debug!("Block hash {}", block_hash); let opaque_call = create_proposed_sidechain_block_call(shard, block_hash); - self.rpc_author.on_block_created(block.signed_top_hashes(), block.hash()); let signed_block = block.sign_block(&self.signer); Ok((opaque_call, signed_block)) 
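For orientation, the block-composer refactor above splits the former flat block constructor into a header plus a block-data part, with the header committing to the hash of the block data. A minimal, self-contained sketch of that shape (hypothetical stand-in types and a placeholder hash function, not the actual `its_primitives` traits):

    // Hypothetical stand-ins for the header/block-data split introduced above.
    // The real types are generic over hash, shard and signature types.
    pub struct SidechainHeader {
        pub block_number: u64,
        pub parent_hash: [u8; 32],
        pub shard: [u8; 32],
        pub block_data_hash: [u8; 32],
    }

    pub struct SidechainBlockData {
        pub author: [u8; 32],
        pub layer_one_head: [u8; 32],
        pub signed_top_hashes: Vec<[u8; 32]>,
        pub encrypted_state_diff: Vec<u8>,
        pub timestamp: u64,
    }

    pub struct SidechainBlock {
        pub header: SidechainHeader,
        pub block_data: SidechainBlockData,
    }

    // Stand-in for hashing the encoded block data (the real code hashes the
    // SCALE-encoded data, e.g. with blake2_256).
    fn placeholder_hash(_data: &SidechainBlockData) -> [u8; 32] {
        [0u8; 32]
    }

    // Mirrors the new compose flow: build the block data first, then a header that
    // commits to its hash, then assemble and (in the real code) sign the block.
    pub fn compose(
        block_number: u64,
        parent_hash: [u8; 32],
        shard: [u8; 32],
        block_data: SidechainBlockData,
    ) -> SidechainBlock {
        let block_data_hash = placeholder_hash(&block_data);
        let header = SidechainHeader { block_number, parent_hash, shard, block_data_hash };
        SidechainBlock { header, block_data }
    }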
diff --git a/sidechain/block-composer/src/error.rs b/sidechain/block-composer/src/error.rs index 0b142e55e9..688c49bc75 100644 --- a/sidechain/block-composer/src/error.rs +++ b/sidechain/block-composer/src/error.rs @@ -31,7 +31,7 @@ pub enum Error { #[error("STF execution error: {0}")] StfExecution(#[from] itp_stf_executor::error::Error), #[error("TOP pool RPC author error: {0}")] - TopPoolRpcAuthor(#[from] its_top_pool_rpc_author::error::Error), + TopPoolAuthor(#[from] itp_top_pool_author::error::Error), #[error(transparent)] Other(#[from] Box), } diff --git a/sidechain/consensus/aura/Cargo.toml b/sidechain/consensus/aura/Cargo.toml index 4f4d8dfdee..62b5fe807f 100644 --- a/sidechain/consensus/aura/Cargo.toml +++ b/sidechain/consensus/aura/Cargo.toml @@ -6,16 +6,16 @@ edition = "2018" [dependencies] log = { version = "0.4.14", default-features = false } -finality-grandpa = { version = "0.14.3", default-features = false, features = ["derive-codec"] } -sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master" } +finality-grandpa = { version = "0.15.0", default-features = false, features = ["derive-codec"] } +sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", optional = true } # sgx deps sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } # substrate deps -frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # local deps ita-stf = { path = "../../../app-libs/stf", default-features = false } @@ -28,7 +28,6 @@ itp-settings = { path = "../../../core-primitives/settings" } itp-sgx-crypto = { path = "../../../core-primitives/sgx/crypto", default-features = false } itp-stf-executor = { path = "../../../core-primitives/stf-executor", default-features = false } itp-stf-state-handler = { path = "../../../core-primitives/stf-state-handler", default-features = false } -itp-storage-verifier = { path = "../../../core-primitives/storage-verified", default-features = false } itp-time-utils = { path = "../../../core-primitives/time-utils", default-features = false } itp-types = { path = "../../../core-primitives/types", default-features = false } its-block-composer = { path = "../../block-composer", default-features = false } @@ -39,17 +38,19 @@ its-state = { path = "../../state", default-features = false } its-top-pool-executor = { path = "../../top-pool-executor", default-features = false } its-validateer-fetch = { path = "../../validateer-fetch", default-features = false } -pallet-ajuna-connectfour = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = 
"update-substrate-5" } +pallet-ajuna-connectfour = { default-features = false, git = "https://github.com/ajuna-network/ajuna-node.git", branch = "validateer-setup" } [dev-dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false } env_logger = "0.9.0" itc-parentchain-block-import-dispatcher = { path = "../../../core/parentchain/block-import-dispatcher", features = ["mocks"] } +itc-parentchain-light-client = { path = "../../../core/parentchain/light-client", features = ["mocks"] } +itp-extrinsics-factory = { path = "../../../core-primitives/extrinsics-factory", features = ["mocks"] } itp-storage = { path = "../../../core-primitives/storage" } itp-test = { path = "../../../core-primitives/test" } its-test = { path = "../../test" } its-top-pool-executor = { path = "../../top-pool-executor", features = ["mocks"] } -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-keyring = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [features] default = ["std"] @@ -61,7 +62,6 @@ std = [ "itp-sgx-crypto/std", "itp-stf-executor/std", "itp-stf-state-handler/std", - "itp-storage-verifier/std", "itp-time-utils/std", "itp-types/std", "its-block-composer/std", @@ -83,7 +83,6 @@ sgx = [ "itp-sgx-crypto/sgx", "itp-stf-executor/sgx", "itp-stf-state-handler/sgx", - "itp-storage-verifier/sgx", "itp-time-utils/sgx", "its-block-composer/sgx", "its-consensus-common/sgx", diff --git a/sidechain/consensus/aura/src/block_importer.rs b/sidechain/consensus/aura/src/block_importer.rs index b1aaae8ca1..465fa7f82a 100644 --- a/sidechain/consensus/aura/src/block_importer.rs +++ b/sidechain/consensus/aura/src/block_importer.rs @@ -20,7 +20,7 @@ // Reexport BlockImport trait which implements fn block_import() pub use its_consensus_common::BlockImport; -use crate::{std::string::ToString, AuraVerifier, SidechainBlockTrait}; +use crate::{AuraVerifier, EnclaveOnChainOCallApi, SidechainBlockTrait}; use ita_stf::{ hash::TrustedOperationOrHash, helpers::get_board_for, ParentchainHeader, SgxBoardStruct, TrustedCall, TrustedCallSigned, @@ -31,7 +31,7 @@ use itc_parentchain_block_import_dispatcher::triggered_dispatcher::{ use itc_parentchain_light_client::{concurrent_access::ValidatorAccess, Validator}; use itp_enclave_metrics::EnclaveMetric; use itp_extrinsics_factory::CreateExtrinsics; -use itp_ocall_api::{EnclaveMetricsOCallApi, EnclaveOnChainOCallApi, EnclaveSidechainOCallApi}; +use itp_ocall_api::{EnclaveMetricsOCallApi, EnclaveSidechainOCallApi}; use itp_settings::{ node::{FINISH_GAME, GAME_REGISTRY_MODULE}, sidechain::SLOT_DURATION, @@ -42,9 +42,9 @@ use itp_stf_state_handler::handle_state::HandleState; use itp_types::{OpaqueCall, H256}; use its_consensus_common::Error as ConsensusError; use its_primitives::traits::{ - Block as BlockTrait, ShardIdentifierFor, SignedBlock as SignedBlockTrait, SignedBlock, + BlockData, Header as HeaderTrait, ShardIdentifierFor, SignedBlock as SignedBlockTrait, }; -use its_state::SidechainDB; +use its_state::{SidechainDB, SidechainState}; use its_top_pool_executor::TopPoolCallOperator; use its_validateer_fetch::ValidateerFetch; use log::*; @@ -54,7 +54,7 @@ use sp_core::Pair; use sp_runtime::{ generic::SignedBlock as SignedParentchainBlock, traits::Block as ParentchainBlockTrait, }; -use std::{marker::PhantomData, sync::Arc, vec::Vec}; +use 
std::{marker::PhantomData, string::ToString, sync::Arc, vec::Vec}; /// Implements `BlockImport`. #[derive(Clone)] @@ -112,7 +112,8 @@ impl< Authority::Public: std::fmt::Debug, ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedBlockTrait + 'static, - SignedSidechainBlock::Block: BlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, OCallApi: EnclaveSidechainOCallApi + ValidateerFetch + EnclaveMetricsOCallApi @@ -153,21 +154,26 @@ impl< } } - pub(crate) fn remove_calls_from_top_pool( - &self, - signed_top_hashes: &[H256], - shard: &ShardIdentifierFor, - ) { - let executed_operations = signed_top_hashes + pub(crate) fn update_top_pool(&self, sidechain_block: &SignedSidechainBlock::Block) { + // FIXME: we should take the rpc author here directly #547. + + // Notify pool about imported block for status updates of the calls. + self.top_pool_executor.on_block_imported(sidechain_block); + + // Remove calls from pool. + let executed_operations = sidechain_block + .block_data() + .signed_top_hashes() .iter() .map(|hash| { // Only successfully executed operations are included in a block. ExecutedOperation::success(*hash, TrustedOperationOrHash::Hash(*hash), Vec::new()) }) .collect(); - // FIXME: we should take the rpc author here directly #547 - let unremoved_calls = - self.top_pool_executor.remove_calls_from_pool(shard, executed_operations); + + let unremoved_calls = self + .top_pool_executor + .remove_calls_from_pool(&sidechain_block.header().shard_id(), executed_operations); for unremoved_call in unremoved_calls { error!( @@ -183,10 +189,10 @@ impl< fn get_calls_in_block( &self, - sidechain_block: &::Block, + sidechain_block: &SignedSidechainBlock::Block, ) -> Result, ConsensusError> { - let shard = &sidechain_block.shard_id(); - let top_hashes = sidechain_block.signed_top_hashes(); + let shard = &sidechain_block.header().shard_id(); + let top_hashes = sidechain_block.block_data().signed_top_hashes(); let calls = self .top_pool_executor .get_trusted_calls(shard) @@ -203,14 +209,14 @@ impl< fn get_board_if_game_finished( &self, - sidechain_block: &::Block, + sidechain_block: &SignedSidechainBlock::Block, call: &TrustedCallSigned, ) -> Result, ConsensusError> { - let shard = &sidechain_block.shard_id(); + let shard = &sidechain_block.header().shard_id(); if let TrustedCall::connectfour_play_turn(account, _b) = &call.call { let mut state = self .state_handler - .load_initialized(&shard) + .load(&shard) .map_err(|e| ConsensusError::Other(format!("{:?}", e).into()))?; if let Some(board) = state.execute_with(|| get_board_for(account.clone())) { if let BoardState::Finished(_) = board.board_state { @@ -225,10 +231,10 @@ impl< fn send_game_finished_extrinsic( &self, - sidechain_block: &::Block, + sidechain_block: &SignedSidechainBlock::Block, board: SgxBoardStruct, ) -> Result<(), ConsensusError> { - let shard = &sidechain_block.shard_id(); + let shard = &sidechain_block.header().shard_id(); // player 1 is red, player 2 is blue // the winner is not the next player let winner = match board.next_player { @@ -292,9 +298,11 @@ impl< Authority::Public: std::fmt::Debug, ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedBlockTrait + 'static, - SignedSidechainBlock::Block: BlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, OCallApi: EnclaveSidechainOCallApi + ValidateerFetch + + EnclaveOnChainOCallApi + EnclaveMetricsOCallApi + Send + Sync @@ -309,6 +317,7 @@ impl< + Sync, ExtrinsicsFactory: CreateExtrinsics, ValidatorAccessor: 
ValidatorAccess, + SidechainDB: SidechainState, { type Verifier = AuraVerifier< Authority, @@ -341,7 +350,7 @@ impl< let updated_state = mutating_function(Self::SidechainState::new(state))?; self.state_handler - .write(updated_state.ext, write_lock, shard) + .write_after_mutation(updated_state.ext, write_lock, shard) .map_err(|e| ConsensusError::Other(format!("{:?}", e).into()))?; Ok(()) @@ -357,7 +366,7 @@ impl< { let state = self .state_handler - .load_initialized(shard) + .load(shard) .map_err(|e| ConsensusError::Other(format!("{:?}", e).into()))?; verifying_function(Self::SidechainState::new(state)) } @@ -380,7 +389,8 @@ impl< let maybe_latest_imported_block = self .parentchain_block_importer .import_until(|signed_parentchain_block| { - signed_parentchain_block.block.hash() == sidechain_block.layer_one_head() + signed_parentchain_block.block.hash() + == sidechain_block.block_data().layer_one_head() }) .map_err(|e| ConsensusError::Other(format!("{:?}", e).into()))?; @@ -394,12 +404,13 @@ impl< sidechain_block: &SignedSidechainBlock::Block, last_imported_parentchain_header: &ParentchainBlock::Header, ) -> Result { - if sidechain_block.layer_one_head() == last_imported_parentchain_header.hash() { + if sidechain_block.block_data().layer_one_head() == last_imported_parentchain_header.hash() + { debug!("No queue peek necessary, sidechain block references latest imported parentchain block"); return Ok(last_imported_parentchain_header.clone()) } - let parentchain_header_hash_to_peek = sidechain_block.layer_one_head(); + let parentchain_header_hash_to_peek = sidechain_block.block_data().layer_one_head(); let maybe_signed_parentchain_block = self .parentchain_block_importer .peek(|parentchain_block| { @@ -414,8 +425,8 @@ impl< format!( "Failed to find parentchain header in import queue (hash: {}) that is \ associated with the current sidechain block that is to be imported (number: {}, hash: {})", - sidechain_block.layer_one_head(), - sidechain_block.block_number(), + sidechain_block.block_data().layer_one_head(), + sidechain_block.header().block_number(), sidechain_block.hash() ) .into(), @@ -434,16 +445,13 @@ impl< // If the block has been proposed by this enclave, remove all successfully applied // trusted calls from the top pool. - if self.block_author_is_self(sidechain_block.block_author()) { - self.remove_calls_from_top_pool( - sidechain_block.signed_top_hashes(), - &sidechain_block.shard_id(), - ) + if self.block_author_is_self(sidechain_block.block_data().block_author()) { + self.update_top_pool(sidechain_block) } // Send metric about sidechain block height (i.e. 
block number) let block_height_metric = - EnclaveMetric::SetSidechainBlockHeight(sidechain_block.block_number()); + EnclaveMetric::SetSidechainBlockHeight(sidechain_block.header().block_number()); if let Err(e) = self.ocall_api.update_metric(block_height_metric) { warn!("Failed to update sidechain block height metric: {:?}", e); } diff --git a/sidechain/consensus/aura/src/lib.rs b/sidechain/consensus/aura/src/lib.rs index c388bcc01b..3f27dec374 100644 --- a/sidechain/consensus/aura/src/lib.rs +++ b/sidechain/consensus/aura/src/lib.rs @@ -31,12 +31,12 @@ extern crate sgx_tstd as std; use core::marker::PhantomData; use itc_parentchain_block_import_dispatcher::triggered_dispatcher::TriggerParentchainBlockImport; -use itp_storage_verifier::GetStorageVerified; +use itp_ocall_api::EnclaveOnChainOCallApi; use itp_time_utils::duration_now; use its_consensus_common::{Environment, Error as ConsensusError, Proposer}; use its_consensus_slots::{SimpleSlotWorker, Slot, SlotInfo}; use its_primitives::{ - traits::{Block as SidechainBlockTrait, SignedBlock}, + traits::{Block as SidechainBlockTrait, Header as HeaderTrait, SignedBlock}, types::block::BlockHash, }; use its_validateer_fetch::ValidateerFetch; @@ -125,7 +125,7 @@ pub enum SlotClaimStrategy { type AuthorityId

=

::Public; type ShardIdentifierFor = - <::Block as SidechainBlockTrait>::ShardIdentifier; + <<::Block as SidechainBlockTrait>::HeaderType as HeaderTrait>::ShardIdentifier; impl SimpleSlotWorker @@ -137,7 +137,7 @@ where E: Environment, E::Proposer: Proposer, SignedSidechainBlock: SignedBlock + Send + 'static, - OcallApi: ValidateerFetch + GetStorageVerified + Send + 'static, + OcallApi: ValidateerFetch + EnclaveOnChainOCallApi + Send + 'static, ImportTrigger: TriggerParentchainBlockImport>, { type Proposer = E::Proposer; @@ -239,7 +239,7 @@ fn authorities( header: &ParentchainHeader, ) -> Result>, ConsensusError> where - ValidateerFetcher: ValidateerFetch + GetStorageVerified, + ValidateerFetcher: ValidateerFetch + EnclaveOnChainOCallApi, P: Pair, ParentchainHeader: ParentchainHeaderTrait, { @@ -443,7 +443,10 @@ mod tests { let result = SimpleSlotWorker::on_slot(&mut aura, slot_info, Default::default()).unwrap(); - assert_eq!(result.block.block.layer_one_head, latest_parentchain_header.hash()); + assert_eq!( + result.block.block.block_data().layer_one_head, + latest_parentchain_header.hash() + ); assert!(parentchain_block_import_trigger.has_import_been_called()); } diff --git a/sidechain/consensus/aura/src/proposer_factory.rs b/sidechain/consensus/aura/src/proposer_factory.rs index 193141ab05..cb63af1766 100644 --- a/sidechain/consensus/aura/src/proposer_factory.rs +++ b/sidechain/consensus/aura/src/proposer_factory.rs @@ -22,7 +22,8 @@ use itp_types::H256; use its_block_composer::ComposeBlockAndConfirmation; use its_consensus_common::{Environment, Error as ConsensusError}; use its_primitives::traits::{ - Block as SidechainBlockTrait, ShardIdentifierFor, SignedBlock as SignedSidechainBlockTrait, + Block as SidechainBlockTrait, Header as HeaderTrait, ShardIdentifierFor, + SignedBlock as SignedSidechainBlockTrait, }; use its_state::{SidechainState, SidechainSystemExt, StateHash}; use its_top_pool_executor::call_operator::TopPoolCallOperator; @@ -66,8 +67,9 @@ where NumberFor: BlockNumberOps, SignedSidechainBlock: SignedSidechainBlockTrait + 'static, - SignedSidechainBlock::Block: - SidechainBlockTrait, + SignedSidechainBlock::Block: SidechainBlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, TopPoolExecutor: TopPoolCallOperator + Send + Sync + 'static, StfExecutor: StateUpdateProposer + Send + Sync + 'static, diff --git a/sidechain/consensus/aura/src/slot_proposer.rs b/sidechain/consensus/aura/src/slot_proposer.rs index 00e1586333..2223cdc3ea 100644 --- a/sidechain/consensus/aura/src/slot_proposer.rs +++ b/sidechain/consensus/aura/src/slot_proposer.rs @@ -22,7 +22,8 @@ use itp_types::H256; use its_block_composer::ComposeBlockAndConfirmation; use its_consensus_common::{Error as ConsensusError, Proposal, Proposer}; use its_primitives::traits::{ - Block as SidechainBlockTrait, ShardIdentifierFor, SignedBlock as SignedSidechainBlockTrait, + Block as SidechainBlockTrait, Header as HeaderTrait, ShardIdentifierFor, + SignedBlock as SignedSidechainBlockTrait, }; use its_state::{SidechainDB, SidechainState, SidechainSystemExt, StateHash}; use its_top_pool_executor::call_operator::TopPoolCallOperator; @@ -64,8 +65,9 @@ impl: BlockNumberOps, SignedSidechainBlock: SignedSidechainBlockTrait + 'static, - SignedSidechainBlock::Block: - SidechainBlockTrait, + SignedSidechainBlock::Block: SidechainBlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, StfExecutor: StateUpdateProposer, ExternalitiesFor: SgxExternalitiesTrait + SidechainState + SidechainSystemExt + 
StateHash, diff --git a/sidechain/consensus/aura/src/test/block_importer_tests.rs b/sidechain/consensus/aura/src/test/block_importer_tests.rs index f829b4968b..49b0aadc12 100644 --- a/sidechain/consensus/aura/src/test/block_importer_tests.rs +++ b/sidechain/consensus/aura/src/test/block_importer_tests.rs @@ -15,14 +15,12 @@ */ -use crate::{ - block_importer::BlockImporter, - test::{fixtures::validateer, mocks::onchain_mock::OnchainMock}, - ShardIdentifierFor, -}; +use crate::{block_importer::BlockImporter, test::fixtures::validateer, ShardIdentifierFor}; use codec::Encode; use core::assert_matches::assert_matches; use itc_parentchain_block_import_dispatcher::trigger_parentchain_block_import_mock::TriggerParentchainBlockImportMock; +use itc_parentchain_light_client::mocks::validator_access_mock::ValidatorAccessMock; +use itp_extrinsics_factory::mock::ExtrinsicsFactoryMock; use itp_sgx_crypto::{aes::Aes, StateCrypto}; use itp_stf_state_handler::handle_state::HandleState; use itp_test::{ @@ -30,9 +28,9 @@ use itp_test::{ parentchain_block_builder::ParentchainBlockBuilder, parentchain_header_builder::ParentchainHeaderBuilder, }, - mock::handle_state_mock::HandleStateMock, + mock::{handle_state_mock::HandleStateMock, onchain_mock::OnchainMock}, }; -use itp_time_utils::duration_now; +use itp_time_utils::{duration_now, now_as_u64}; use itp_types::{Block as ParentchainBlock, Header as ParentchainHeader, H256}; use its_consensus_common::{BlockImport, Error as ConsensusError}; use its_primitives::{ @@ -40,7 +38,11 @@ use its_primitives::{ types::{Block as SidechainBlock, SignedBlock as SignedSidechainBlock}, }; use its_state::{SidechainDB, SidechainState, StateUpdate}; -use its_test::sidechain_block_builder::SidechainBlockBuilder; +use its_test::{ + sidechain_block_builder::SidechainBlockBuilder, + sidechain_block_data_builder::SidechainBlockDataBuilder, + sidechain_header_builder::SidechainHeaderBuilder, +}; use its_top_pool_executor::call_operator_mock::TopPoolCallOperatorMock; use sgx_externalities::{SgxExternalities, SgxExternalitiesDiffType}; use sp_core::{blake2_256, ed25519::Pair}; @@ -62,6 +64,8 @@ type TestBlockImporter = BlockImporter< Aes, TestTopPoolCallOperator, TestParentchainBlockImportTrigger, + ExtrinsicsFactoryMock, + ValidatorAccessMock, >; fn state_key() -> Aes { @@ -79,7 +83,7 @@ fn default_authority() -> Pair { fn test_fixtures( parentchain_block_import_trigger: Arc, ) -> (TestBlockImporter, Arc, Arc) { - let state_handler = Arc::new(HandleStateMock::default()); + let state_handler = Arc::new(HandleStateMock::from_shard(shard()).unwrap()); let top_pool_call_operator = Arc::new(TestTopPoolCallOperator::default()); let ocall_api = Arc::new( OnchainMock::default() @@ -107,7 +111,7 @@ fn test_fixtures_with_default_import_trigger( fn empty_encrypted_state_update(state_handler: &HandleStateMock) -> Vec { let apriori_state_hash = - TestSidechainState::new(state_handler.load_initialized(&shard()).unwrap()).state_hash(); + TestSidechainState::new(state_handler.load(&shard()).unwrap()).state_hash(); let empty_state_diff = SgxExternalitiesDiffType::default(); let mut state_update = StateUpdate::new(apriori_state_hash, apriori_state_hash, empty_state_diff).encode(); @@ -122,12 +126,21 @@ fn signed_block( ) -> SignedSidechainBlock { let state_update = empty_encrypted_state_update(state_handler); - SidechainBlockBuilder::default() - .with_timestamp(duration_now().as_millis() as u64) - .with_parentchain_block_hash(parentchain_header.hash()) + let header = SidechainHeaderBuilder::default() 
.with_parent_hash(H256::default()) .with_shard(shard()) + .build(); + + let block_data = SidechainBlockDataBuilder::default() + .with_timestamp(now_as_u64()) + .with_layer_one_head(parentchain_header.hash()) + .with_signer(signer.clone()) .with_payload(state_update) + .build(); + + SidechainBlockBuilder::default() + .with_header(header) + .with_block_data(block_data) .with_signer(signer) .build_signed() } @@ -158,15 +171,24 @@ fn block_import_with_invalid_signature_fails() { let parentchain_header = ParentchainHeaderBuilder::default().build(); let state_update = empty_encrypted_state_update(state_handler.as_ref()); - let block = SidechainBlockBuilder::default() - .with_timestamp(duration_now().as_millis() as u64) - .with_parentchain_block_hash(parentchain_header.hash()) - .with_signer(Keyring::Charlie.pair()) + let header = SidechainHeaderBuilder::default() .with_parent_hash(H256::default()) .with_shard(shard()) + .build(); + + let block_data = SidechainBlockDataBuilder::default() + .with_timestamp(duration_now().as_millis() as u64) + .with_layer_one_head(parentchain_header.hash()) + .with_signer(Keyring::Charlie.pair()) .with_payload(state_update) .build(); + let block = SidechainBlockBuilder::default() + .with_signer(Keyring::Charlie.pair()) + .with_header(header) + .with_block_data(block_data) + .build(); + // Bob signs the block, but Charlie is set as the author -> invalid signature. let invalid_signature_block: SignedSidechainBlock = block.sign_block(&Keyring::Bob.pair()); diff --git a/sidechain/consensus/aura/src/test/fixtures/types.rs b/sidechain/consensus/aura/src/test/fixtures/types.rs index 9f1a3d5b9a..0a0266d17c 100644 --- a/sidechain/consensus/aura/src/test/fixtures/types.rs +++ b/sidechain/consensus/aura/src/test/fixtures/types.rs @@ -24,7 +24,10 @@ use itc_parentchain_block_import_dispatcher::trigger_parentchain_block_import_mo use itp_test::mock::onchain_mock::OnchainMock; use itp_types::Block as ParentchainBlock; use its_primitives::{ - traits::{Block as SidechainBlockTrait, SignedBlock as SignedBlockTrait}, + traits::{ + Block as SidechainBlockTrait, Header as SidechainHeaderTrait, + SignedBlock as SignedBlockTrait, + }, types::block::{Block as SidechainBlock, SignedBlock as SignedSidechainBlock}, }; use sp_runtime::{app_crypto::ed25519, generic::SignedBlock}; @@ -32,7 +35,7 @@ use sp_runtime::{app_crypto::ed25519, generic::SignedBlock}; type AuthorityPair = ed25519::Pair; pub type ShardIdentifierFor = - <::Block as SidechainBlockTrait>::ShardIdentifier; + <<::Block as SidechainBlockTrait>::HeaderType as SidechainHeaderTrait>::ShardIdentifier; pub type TestAura = Aura< AuthorityPair, diff --git a/sidechain/consensus/aura/src/test/mocks/proposer_mock.rs b/sidechain/consensus/aura/src/test/mocks/proposer_mock.rs index 56088135ae..af0c13d2f9 100644 --- a/sidechain/consensus/aura/src/test/mocks/proposer_mock.rs +++ b/sidechain/consensus/aura/src/test/mocks/proposer_mock.rs @@ -19,7 +19,10 @@ use crate::ConsensusError; use itp_types::{Block as ParentchainBlock, Header}; use its_consensus_common::{Proposal, Proposer}; use its_primitives::types::block::SignedBlock as SignedSidechainBlock; -use its_test::sidechain_block_builder::SidechainBlockBuilder; +use its_test::{ + sidechain_block_builder::SidechainBlockBuilder, + sidechain_block_data_builder::SidechainBlockDataBuilder, +}; use std::time::Duration; pub struct ProposerMock { @@ -32,9 +35,12 @@ impl Proposer for ProposerMock { _max_duration: Duration, ) -> Result, ConsensusError> { Ok(Proposal { - block: 
SidechainBlockBuilder::random() - .with_parentchain_block_hash(self.parentchain_header.hash()) - .build_signed(), + block: { + let block_data = SidechainBlockDataBuilder::random() + .with_layer_one_head(self.parentchain_header.hash()) + .build(); + SidechainBlockBuilder::random().with_block_data(block_data).build_signed() + }, parentchain_effects: Default::default(), }) diff --git a/sidechain/consensus/aura/src/verifier.rs b/sidechain/consensus/aura/src/verifier.rs index 9f13ad8405..cf16f2c14f 100644 --- a/sidechain/consensus/aura/src/verifier.rs +++ b/sidechain/consensus/aura/src/verifier.rs @@ -15,14 +15,16 @@ */ -use crate::{authorities, slot_author}; +use crate::{authorities, slot_author, EnclaveOnChainOCallApi}; use core::marker::PhantomData; use frame_support::ensure; -use itp_storage_verifier::GetStorageVerified; use its_consensus_common::{Error as ConsensusError, Verifier}; use its_consensus_slots::{slot_from_time_stamp_and_duration, Slot}; use its_primitives::{ - traits::{Block as SidechainBlockTrait, SignedBlock as SignedSidechainBlockTrait}, + traits::{ + Block as SidechainBlockTrait, BlockData, Header as HeaderTrait, + SignedBlock as SignedSidechainBlockTrait, + }, types::block::BlockHash, }; use its_state::LastBlockExt; @@ -59,14 +61,14 @@ where SignedSidechainBlock: SignedSidechainBlockTrait + 'static, SignedSidechainBlock::Block: SidechainBlockTrait, SidechainState: LastBlockExt + Send + Sync, - Context: ValidateerFetch + GetStorageVerified + Send + Sync, + Context: ValidateerFetch + EnclaveOnChainOCallApi + Send + Sync, { type BlockImportParams = SignedSidechainBlock; type Context = Context; fn verify( - &mut self, + &self, signed_block: SignedSidechainBlock, parentchain_header: &ParentchainBlock::Header, ctx: &Self::Context, @@ -77,17 +79,12 @@ where ); let slot = slot_from_time_stamp_and_duration( - Duration::from_millis(signed_block.block().timestamp()), + Duration::from_millis(signed_block.block().block_data().timestamp()), self.slot_duration, ); - verify_author::( - &slot, - signed_block.block(), - parentchain_header, - ctx, - )?; - + // We need to check the ancestry first to ensure that an already imported block does not result + // in an author verification error, but rather a `BlockAlreadyImported` error. match self.sidechain_state.get_last_block() { Some(last_block) => verify_block_ancestry::( signed_block.block(), @@ -96,6 +93,13 @@ where None => ensure_first_block(signed_block.block())?, } + verify_author::( + &slot, + signed_block.block(), + parentchain_header, + ctx, + )?; + Ok(signed_block) } } @@ -112,10 +116,10 @@ where AuthorityPair::Public: Debug, SignedSidechainBlock: SignedSidechainBlockTrait + 'static, ParentchainHeader: ParentchainHeaderTrait, - Context: ValidateerFetch + GetStorageVerified, + Context: ValidateerFetch + EnclaveOnChainOCallApi, { ensure!( - parentchain_head.hash() == block.layer_one_head(), + parentchain_head.hash() == block.block_data().layer_one_head(), ConsensusError::BadParentchainBlock( parentchain_head.hash(), "Invalid parentchain head".into(), @@ -128,11 +132,11 @@ where .ok_or_else(|| ConsensusError::CouldNotGetAuthorities("No authorities found".into()))?; ensure!( - expected_author == block.block_author(), + expected_author == block.block_data().block_author(), ConsensusError::InvalidAuthority(format!( "Expected author: {:?}, author found in block: {:?}", expected_author, - block.block_author() + block.block_data().block_author() )) ); @@ -147,28 +151,31 @@ fn verify_block_ancestry( // We have already imported this block. 
ensure!( - block.block_number() > last_block.block_number(), - ConsensusError::BlockAlreadyImported(block.block_number(), last_block.block_number()) + block.header().block_number() > last_block.header().block_number(), + ConsensusError::BlockAlreadyImported( + block.header().block_number(), + last_block.header().block_number() + ) ); // We are missing some blocks between our last known block and the one we're trying to import. ensure!( - last_block.block_number() + 1 == block.block_number(), + last_block.header().block_number() + 1 == block.header().block_number(), ConsensusError::BlockAncestryMismatch( - last_block.block_number(), + last_block.header().block_number(), last_block.hash(), format!( "Invalid block number, {} does not succeed {}", - block.block_number(), - last_block.block_number() + block.header().block_number(), + last_block.header().block_number() ) ) ); ensure!( - last_block.hash() == block.parent_hash(), + last_block.hash() == block.header().parent_hash(), ConsensusError::BlockAncestryMismatch( - last_block.block_number(), + last_block.header().block_number(), last_block.hash(), "Parent hash does not match".into(), ) @@ -181,17 +188,17 @@ fn ensure_first_block( block: &SidechainBlock, ) -> Result<(), ConsensusError> { ensure!( - block.block_number() == 1, + block.header().block_number() == 1, ConsensusError::InvalidFirstBlock( - block.block_number(), + block.header().block_number(), "No last block found, expecting first block. But block to import has number != 1" .into() ) ); ensure!( - block.parent_hash() == Default::default(), + block.header().parent_hash() == Default::default(), ConsensusError::InvalidFirstBlock( - block.block_number(), + block.header().block_number(), "No last block found, excepting first block. But block to import has parent_hash != 0" .into() ) @@ -209,54 +216,91 @@ mod tests { }; use core::assert_matches::assert_matches; use frame_support::assert_ok; - use itp_test::mock::onchain_mock::OnchainMock; - use its_test::sidechain_block_builder::SidechainBlockBuilder; + use itp_test::{ + builders::parentchain_header_builder::ParentchainHeaderBuilder, + mock::onchain_mock::OnchainMock, + }; + use its_primitives::types::{header::Header, SignedBlock}; + use its_test::{ + sidechain_block_builder::SidechainBlockBuilder, + sidechain_block_data_builder::SidechainBlockDataBuilder, + sidechain_header_builder::SidechainHeaderBuilder, + }; use sp_keyring::ed25519::Keyring; - use sp_runtime::{app_crypto::ed25519, testing::H256}; + use sp_runtime::testing::H256; fn assert_ancestry_mismatch_err(result: Result) { assert_matches!(result, Err(ConsensusError::BlockAncestryMismatch(_, _, _,))) } - fn block2_builder(signer: ed25519::Pair, parent_hash: H256) -> SidechainBlockBuilder { - block1_builder(signer).with_parent_hash(parent_hash).with_number(2) - } + fn block(signer: Keyring, header: Header) -> SignedBlock { + let block_data = SidechainBlockDataBuilder::default() + .with_signer(signer.pair()) + .with_timestamp(0) + .with_layer_one_head(default_header().hash()) + .build(); - fn block1_builder(signer: ed25519::Pair) -> SidechainBlockBuilder { SidechainBlockBuilder::default() - .with_signer(signer) - .with_parentchain_block_hash(default_header().hash()) - .with_number(1) - .with_timestamp(0) + .with_header(header) + .with_block_data(block_data) + .with_signer(signer.pair()) + .build_signed() + } + + fn block1(signer: Keyring) -> SignedBlock { + let header = SidechainHeaderBuilder::default().with_block_number(1).build(); + + block(signer, header) + } + + fn 
block2(signer: Keyring, parent_hash: H256) -> SignedBlock { + let header = SidechainHeaderBuilder::default() + .with_parent_hash(parent_hash) + .with_block_number(2) + .build(); + + block(signer, header) + } + + fn block3(signer: Keyring, parent_hash: H256, block_number: u64) -> SignedBlock { + let header = SidechainHeaderBuilder::default() + .with_parent_hash(parent_hash) + .with_block_number(block_number) + .build(); + + block(signer, header) } #[test] fn ensure_first_block_works() { - let b = SidechainBlockBuilder::default().build(); - assert_ok!(ensure_first_block(&b)); + let block = SidechainBlockBuilder::default().build(); + assert_ok!(ensure_first_block(&block)); } #[test] fn ensure_first_block_errs_with_invalid_block_number() { - let b = SidechainBlockBuilder::default().with_number(2).build(); - assert_matches!(ensure_first_block(&b), Err(ConsensusError::InvalidFirstBlock(2, _))) + let header = SidechainHeaderBuilder::default().with_block_number(2).build(); + let block = SidechainBlockBuilder::default().with_header(header).build(); + assert_matches!(ensure_first_block(&block), Err(ConsensusError::InvalidFirstBlock(2, _))) } #[test] fn ensure_first_block_errs_with_invalid_parent_hash() { let parent = H256::random(); - let b = SidechainBlockBuilder::default().with_parent_hash(parent).build(); + let header = SidechainHeaderBuilder::default().with_parent_hash(parent).build(); + let block = SidechainBlockBuilder::default().with_header(header).build(); - assert_matches!(ensure_first_block(&b), Err(ConsensusError::InvalidFirstBlock(_, _))); + assert_matches!(ensure_first_block(&block), Err(ConsensusError::InvalidFirstBlock(_, _))); } #[test] fn verify_block_ancestry_works() { let last_block = SidechainBlockBuilder::default().build(); - let curr_block = SidechainBlockBuilder::default() + let header = SidechainHeaderBuilder::default() .with_parent_hash(last_block.hash()) - .with_number(2) + .with_block_number(2) .build(); + let curr_block = SidechainBlockBuilder::default().with_header(header).build(); assert_ok!(verify_block_ancestry(&curr_block, &last_block)); } @@ -264,10 +308,11 @@ mod tests { #[test] fn verify_block_ancestry_errs_with_invalid_parent_block_number() { let last_block = SidechainBlockBuilder::default().build(); - let curr_block = SidechainBlockBuilder::default() + let header = SidechainHeaderBuilder::default() .with_parent_hash(last_block.hash()) - .with_number(5) + .with_block_number(5) .build(); + let curr_block = SidechainBlockBuilder::default().with_header(header).build(); assert_ancestry_mismatch_err(verify_block_ancestry(&curr_block, &last_block)); } @@ -275,7 +320,8 @@ mod tests { #[test] fn verify_block_ancestry_errs_with_invalid_parent_hash() { let last_block = SidechainBlockBuilder::default().build(); - let curr_block = SidechainBlockBuilder::default().with_number(2).build(); + let header = SidechainHeaderBuilder::default().with_block_number(2).build(); + let curr_block = SidechainBlockBuilder::default().with_header(header).build(); assert_ancestry_mismatch_err(verify_block_ancestry(&curr_block, &last_block)); } @@ -286,30 +332,30 @@ mod tests { let last_block = SidechainBlockBuilder::default().build(); let signer = Keyring::Alice; - let curr_block = block2_builder(signer.pair(), last_block.hash()).build_signed(); + let curr_block = block2(signer, last_block.hash()); let state_mock = StateMock { last_block: Some(last_block) }; let onchain_mock = OnchainMock::default() .with_validateer_set(Some(vec![validateer(signer.public().into())])); - let mut aura = 
TestAuraVerifier::new(SLOT_DURATION, state_mock); + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); - assert_ok!(aura.verify(curr_block, &default_header(), &onchain_mock)); + assert_ok!(aura_verifier.verify(curr_block, &default_header(), &onchain_mock)); } #[test] fn verify_works_for_first_block() { let signer = Keyring::Alice; - let curr_block = block1_builder(signer.pair()).build_signed(); + let curr_block = block1(signer); let state_mock = StateMock { last_block: None }; let onchain_mock = OnchainMock::default() .with_validateer_set(Some(vec![validateer(signer.public().into())])); - let mut aura = TestAuraVerifier::new(SLOT_DURATION, state_mock); + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); - assert_ok!(aura.verify(curr_block, &default_header(), &onchain_mock)); + assert_ok!(aura_verifier.verify(curr_block, &default_header(), &onchain_mock)); } #[test] @@ -317,7 +363,7 @@ mod tests { let last_block = SidechainBlockBuilder::default().build(); let signer = Keyring::Alice; - let curr_block = block2_builder(signer.pair(), last_block.hash()).build_signed(); + let curr_block = block2(signer, last_block.hash()); let state_mock = StateMock { last_block: Some(last_block) }; let onchain_mock = OnchainMock::default().with_validateer_set(Some(vec![ @@ -325,10 +371,10 @@ mod tests { validateer(signer.public().into()), ])); - let mut aura = TestAuraVerifier::new(SLOT_DURATION, state_mock); + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); assert_matches!( - aura.verify(curr_block, &default_header(), &onchain_mock).unwrap_err(), + aura_verifier.verify(curr_block, &default_header(), &onchain_mock).unwrap_err(), ConsensusError::InvalidAuthority(_) ); } @@ -338,31 +384,35 @@ mod tests { let last_block = SidechainBlockBuilder::default().build(); let signer = Keyring::Alice; - let curr_block = block2_builder(signer.pair(), Default::default()).build_signed(); + let curr_block = block2(signer, Default::default()); let state_mock = StateMock { last_block: Some(last_block) }; let onchain_mock = OnchainMock::default() .with_validateer_set(Some(vec![validateer(signer.public().into())])); - let mut aura = TestAuraVerifier::new(SLOT_DURATION, state_mock); + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); - assert_ancestry_mismatch_err(aura.verify(curr_block, &default_header(), &onchain_mock)); + assert_ancestry_mismatch_err(aura_verifier.verify( + curr_block, + &default_header(), + &onchain_mock, + )); } #[test] fn verify_errs_on_wrong_first_block() { let signer = Keyring::Alice; - let curr_block = block2_builder(signer.pair(), Default::default()).build_signed(); + let curr_block = block2(signer, Default::default()); let state_mock = StateMock { last_block: None }; let onchain_mock = OnchainMock::default() .with_validateer_set(Some(vec![validateer(signer.public().into())])); - let mut aura = TestAuraVerifier::new(SLOT_DURATION, state_mock); + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); assert_matches!( - aura.verify(curr_block, &default_header(), &onchain_mock), + aura_verifier.verify(curr_block, &default_header(), &onchain_mock), Err(ConsensusError::InvalidFirstBlock(2, _)) ); } @@ -374,17 +424,57 @@ mod tests { // Current block has also number 1, same as last. So import should return an error // that a block with this number is already imported. 
- let curr_block = - block2_builder(signer.pair(), last_block.hash()).with_number(1).build_signed(); + let curr_block = block3(signer, last_block.hash(), 1); + + let state_mock = StateMock { last_block: Some(last_block) }; + let onchain_mock = OnchainMock::default() + .with_validateer_set(Some(vec![validateer(signer.public().into())])); + + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); + + assert_matches!( + aura_verifier.verify(curr_block, &default_header(), &onchain_mock), + Err(ConsensusError::BlockAlreadyImported(1, 1)) + ); + } + + #[test] + fn verify_block_already_imported_error_even_if_parentchain_block_mismatches() { + // This test is to ensure that we get a 'AlreadyImported' error, when the sidechain block + // is already imported, and the parentchain block that is passed into the verifier is newer. + // Important because client of the verifier acts differently for an 'AlreadyImported' error than an 'AncestryErrorMismatch'. + + let signer = Keyring::Alice; + + let parentchain_header_1 = ParentchainHeaderBuilder::default().with_number(1).build(); + let parentchain_header_2 = ParentchainHeaderBuilder::default().with_number(2).build(); + + let block_data = SidechainBlockDataBuilder::default() + .with_layer_one_head(parentchain_header_1.hash()) + .with_signer(signer.pair()) + .build(); + let last_block = SidechainBlockBuilder::default() + .with_block_data(block_data) + .with_signer(signer.pair()) + .build(); + + let block_data_for_signed_block = SidechainBlockDataBuilder::default() + .with_layer_one_head(parentchain_header_1.hash()) + .with_signer(signer.pair()) + .build(); + let signed_block_to_verify = SidechainBlockBuilder::default() + .with_block_data(block_data_for_signed_block) + .with_signer(signer.pair()) + .build_signed(); let state_mock = StateMock { last_block: Some(last_block) }; let onchain_mock = OnchainMock::default() .with_validateer_set(Some(vec![validateer(signer.public().into())])); - let mut aura = TestAuraVerifier::new(SLOT_DURATION, state_mock); + let aura_verifier = TestAuraVerifier::new(SLOT_DURATION, state_mock); assert_matches!( - aura.verify(curr_block, &default_header(), &onchain_mock), + aura_verifier.verify(signed_block_to_verify, &parentchain_header_2, &onchain_mock), Err(ConsensusError::BlockAlreadyImported(1, 1)) ); } diff --git a/sidechain/consensus/common/Cargo.toml b/sidechain/consensus/common/Cargo.toml index 84c351e52b..3cd1762d54 100644 --- a/sidechain/consensus/common/Cargo.toml +++ b/sidechain/consensus/common/Cargo.toml @@ -30,7 +30,7 @@ sgx = [ ] [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false } log = { version = "0.4.14", default-features = false } thiserror = { version = "1.0.26", optional = true } @@ -48,7 +48,7 @@ sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-s thiserror-sgx = { package = "thiserror", git = "https://github.com/mesalock-linux/thiserror-sgx", tag = "sgx_1.1.3", optional = true } # substrate deps -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [dev-dependencies] # local @@ -56,7 +56,7 @@ itp-test = { path = "../../../core-primitives/test" } its-consensus-aura = { path = "../aura" } its-test = { path = 
"../../test" } # substrate -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-keyring = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # integritee / scs sgx-externalities = { git = "https://github.com/ajuna-network/sgx-runtime", branch = "master" } diff --git a/sidechain/consensus/common/src/block_import.rs b/sidechain/consensus/common/src/block_import.rs index 3d45124b70..fd59670661 100644 --- a/sidechain/consensus/common/src/block_import.rs +++ b/sidechain/consensus/common/src/block_import.rs @@ -22,9 +22,11 @@ use codec::Decode; use itp_ocall_api::EnclaveSidechainOCallApi; use itp_sgx_crypto::StateCrypto; use its_primitives::traits::{ - Block as SidechainBlockT, ShardIdentifierFor, SignedBlock as SignedSidechainBlockTrait, + Block as SidechainBlockTrait, BlockData, Header as HeaderTrait, ShardIdentifierFor, + SignedBlock as SignedSidechainBlockTrait, }; use its_state::{LastBlockExt, SidechainState}; +use log::*; use sp_runtime::traits::Block as ParentchainBlockTrait; use std::vec::Vec; @@ -108,13 +110,23 @@ where parentchain_header: &ParentchainBlock::Header, ) -> Result { let sidechain_block = signed_sidechain_block.block().clone(); - let shard = sidechain_block.shard_id(); + let shard = sidechain_block.header().shard_id(); + + debug!( + "Attempting to import sidechain block (number: {}, parentchain hash: {:?})", + signed_sidechain_block.block().header().block_number(), + signed_sidechain_block.block().block_data().layer_one_head() + ); let peeked_parentchain_header = - self.peek_parentchain_header(&sidechain_block, parentchain_header)?; + self.peek_parentchain_header(&sidechain_block, parentchain_header) + .unwrap_or_else(|e| { + warn!("Could not peek parentchain block, returning latest parentchain block ({:?})", e); + parentchain_header.clone() + }); let block_import_params = self.verify_import(&shard, |state| { - let mut verifier = self.verifier(state); + let verifier = self.verifier(state); verifier.verify( signed_sidechain_block.clone(), @@ -128,7 +140,7 @@ where self.apply_state_update(&shard, |mut state| { let update = state_update_from_encrypted( - block_import_params.block().state_payload(), + block_import_params.block().block_data().encrypted_state_diff(), self.state_key(), )?; diff --git a/sidechain/consensus/common/src/block_import_queue_worker.rs b/sidechain/consensus/common/src/block_import_queue_worker.rs index 903aac7a88..372efb015a 100644 --- a/sidechain/consensus/common/src/block_import_queue_worker.rs +++ b/sidechain/consensus/common/src/block_import_queue_worker.rs @@ -18,7 +18,6 @@ use crate::{Error, Result, SyncBlockFromPeer}; use core::marker::PhantomData; use itp_block_import_queue::PopFromBlockQueue; -use itp_types::H256; use its_primitives::traits::{Block as BlockTrait, SignedBlock as SignedSidechainBlockTrait}; use sp_runtime::traits::Block as ParentchainBlockTrait; use std::sync::Arc; @@ -48,7 +47,7 @@ impl where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: BlockTrait, + SignedSidechainBlock::Block: BlockTrait, BlockImportQueue: PopFromBlockQueue, 
PeerBlockSyncer: SyncBlockFromPeer, { @@ -74,7 +73,7 @@ impl > where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: BlockTrait, + SignedSidechainBlock::Block: BlockTrait, BlockImportQueue: PopFromBlockQueue, PeerBlockSyncer: SyncBlockFromPeer, { diff --git a/sidechain/consensus/common/src/lib.rs b/sidechain/consensus/common/src/lib.rs index c5d543db07..a8d5ac1680 100644 --- a/sidechain/consensus/common/src/lib.rs +++ b/sidechain/consensus/common/src/lib.rs @@ -58,7 +58,7 @@ where /// Verify the given data and return the `BlockImportParams` if successful fn verify( - &mut self, + &self, block: SignedSidechainBlock, parentchain_header: &ParentchainBlock::Header, ctx: &Self::Context, diff --git a/sidechain/consensus/common/src/peer_block_sync.rs b/sidechain/consensus/common/src/peer_block_sync.rs index 49167cef17..67c210290f 100644 --- a/sidechain/consensus/common/src/peer_block_sync.rs +++ b/sidechain/consensus/common/src/peer_block_sync.rs @@ -20,7 +20,10 @@ use core::marker::PhantomData; use itp_ocall_api::EnclaveSidechainOCallApi; use itp_types::H256; use its_primitives::{ - traits::{Block as BlockTrait, ShardIdentifierFor, SignedBlock as SignedSidechainBlockTrait}, + traits::{ + Block as BlockTrait, Header as HeaderTrait, ShardIdentifierFor, + SignedBlock as SignedSidechainBlockTrait, + }, types::BlockHash, }; use log::*; @@ -55,7 +58,8 @@ impl where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: BlockTrait, + <::Block as BlockTrait>::HeaderType: + HeaderTrait, BlockImporter: BlockImport, SidechainOCallApi: EnclaveSidechainOCallApi, { @@ -82,7 +86,7 @@ where let mut latest_imported_parentchain_header = current_parentchain_header.clone(); for block_to_import in blocks_to_import { - let block_number = block_to_import.block().block_number(); + let block_number = block_to_import.block().header().block_number(); latest_imported_parentchain_header = match self .importer @@ -112,7 +116,8 @@ impl where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: BlockTrait, + <::Block as BlockTrait>::HeaderType: + HeaderTrait, BlockImporter: BlockImport, SidechainOCallApi: EnclaveSidechainOCallApi, { @@ -121,8 +126,8 @@ where sidechain_block: SignedSidechainBlock, current_parentchain_header: &ParentchainBlock::Header, ) -> Result { - let shard_identifier = sidechain_block.block().shard_id(); - let sidechain_block_number = sidechain_block.block().block_number(); + let shard_identifier = sidechain_block.block().header().shard_id(); + let sidechain_block_number = sidechain_block.block().header().block_number(); // Attempt to import the block - in case we encounter an ancestry error, we go into // peer fetching mode to fetch sidechain blocks from a peer and import those first. 
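The peer-sync changes above read shard and block number through the new header accessor and carry the latest imported parentchain header through the import loop. A hedged, self-contained sketch of that loop with hypothetical types (the real implementation is generic over the block types and goes through the enclave OCall API):

    // Hypothetical types; they only mirror the fields the loop above touches.
    pub struct ParentchainHeader;

    pub struct SignedSidechainBlock {
        pub block_number: u64,
    }

    pub trait ImportBlock {
        // Returns the parentchain header imported up to for this block,
        // analogous to the importer call in the loop above.
        fn import(
            &self,
            block: SignedSidechainBlock,
            latest_parentchain_header: &ParentchainHeader,
        ) -> Result<ParentchainHeader, String>;
    }

    pub fn import_blocks_from_peer<I: ImportBlock>(
        importer: &I,
        blocks_to_import: Vec<SignedSidechainBlock>,
        current_parentchain_header: ParentchainHeader,
    ) -> Result<ParentchainHeader, String> {
        let mut latest_imported_parentchain_header = current_parentchain_header;
        for block_to_import in blocks_to_import {
            let block_number = block_to_import.block_number;
            // Each successful import may advance the parentchain header that the
            // next block is verified against, as in the loop above.
            latest_imported_parentchain_header = importer
                .import(block_to_import, &latest_imported_parentchain_header)
                .map_err(|e| {
                    format!("Failed to import sidechain block {}: {}", block_number, e)
                })?;
        }
        Ok(latest_imported_parentchain_header)
    }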
diff --git a/sidechain/consensus/common/src/test/mocks/verifier_mock.rs b/sidechain/consensus/common/src/test/mocks/verifier_mock.rs index 4e2ae1e604..6e104574ea 100644 --- a/sidechain/consensus/common/src/test/mocks/verifier_mock.rs +++ b/sidechain/consensus/common/src/test/mocks/verifier_mock.rs @@ -51,7 +51,7 @@ where type Context = VerifierContext; fn verify( - &mut self, + &self, _block: SignedSidechainBlock, _parentchain_header: &ParentchainBlock::Header, _ctx: &Self::Context, diff --git a/sidechain/consensus/slots/Cargo.toml b/sidechain/consensus/slots/Cargo.toml index bc9d73ed8e..c0cf2459ac 100644 --- a/sidechain/consensus/slots/Cargo.toml +++ b/sidechain/consensus/slots/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] log = { version = "0.4.14", default-features = false } -codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } derive_more = "0.99.16" lazy_static = { version = "1.1.0", features = ["spin_no_std"] } @@ -19,8 +19,8 @@ futures-timer = { version = "3.0.1", optional = true } sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true, features = ["untrusted_time"] } # substrate deps -sp-consensus-slots = { version = "0.10.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-consensus-slots = { version = "0.10.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # local deps itp-sgx-io = { path = "../../../core-primitives/sgx/io", default-features = false } @@ -30,7 +30,8 @@ its-consensus-common = { path = "../common", default-features = false } its-primitives = { path = "../../primitives", default-features = false } [dev-dependencies] -sp-keyring = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } +its-test = { path = "../../test" } +sp-keyring = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } tokio = "*" diff --git a/sidechain/consensus/slots/src/lib.rs b/sidechain/consensus/slots/src/lib.rs index d7a3c09b67..37dd4d0874 100644 --- a/sidechain/consensus/slots/src/lib.rs +++ b/sidechain/consensus/slots/src/lib.rs @@ -37,7 +37,8 @@ use itp_time_utils::{duration_now, remaining_time}; use itp_types::OpaqueCall; use its_consensus_common::{Error as ConsensusError, Proposer}; use its_primitives::traits::{ - Block as SidechainBlock, ShardIdentifierFor, SignedBlock as SignedSidechainBlock, + Block as SidechainBlockTrait, Header as HeaderTrait, ShardIdentifierFor, + SignedBlock as SignedSidechainBlock, }; use log::{debug, info, warn}; pub use slots::*; @@ -258,7 +259,7 @@ pub trait SimpleSlotWorker { } info!("Proposing sidechain block (number: {}, hash: {}) based on parentchain block (number: {:?}, hash: {:?})", - proposing.block.block().block_number(), proposing.block.hash(), + proposing.block.block().header().block_number(), proposing.block.hash(), latest_imported_parentchain_header.number(), latest_imported_parentchain_header.hash() ); diff --git 
a/sidechain/consensus/slots/src/slots.rs b/sidechain/consensus/slots/src/slots.rs index e77240aa56..62d1187162 100644 --- a/sidechain/consensus/slots/src/slots.rs +++ b/sidechain/consensus/slots/src/slots.rs @@ -21,11 +21,11 @@ pub use sp_consensus_slots::Slot; -use itp_sgx_io::SealedIO; +use itp_sgx_io::StaticSealedIO; use itp_time_utils::duration_now; use its_consensus_common::Error as ConsensusError; use its_primitives::traits::{ - Block as SidechainBlockTrait, SignedBlock as SignedSidechainBlockTrait, + Block as SidechainBlockTrait, BlockData, SignedBlock as SignedSidechainBlockTrait, }; use sp_runtime::traits::Block as ParentchainBlockTrait; use std::time::Duration; @@ -86,7 +86,7 @@ pub(crate) fn timestamp_within_slot< slot: &SlotInfo, proposal: &SignedSidechainBlock, ) -> bool { - let proposal_stamp = proposal.block().timestamp(); + let proposal_stamp = proposal.block().block_data().timestamp(); slot.timestamp.as_millis() as u64 <= proposal_stamp && slot.ends_at.as_millis() as u64 >= proposal_stamp @@ -127,12 +127,12 @@ pub trait GetLastSlot { fn set_last_slot(&mut self, slot: Slot) -> Result<(), ConsensusError>; } -impl> GetLastSlot for T { +impl> GetLastSlot for T { fn get_last_slot(&self) -> Result { - Self::unseal() + T::unseal_from_static_file() } fn set_last_slot(&mut self, slot: Slot) -> Result<(), ConsensusError> { - Self::seal(slot) + T::seal_to_static_file(slot) } } @@ -140,7 +140,7 @@ impl> GetLastSlot for T { pub mod sgx { use super::*; use codec::{Decode, Encode}; - use itp_sgx_io::{seal, unseal, SealedIO}; + use itp_sgx_io::{seal, unseal, StaticSealedIO}; use lazy_static::lazy_static; use std::sync::SgxRwLock; @@ -152,11 +152,11 @@ pub mod sgx { const LAST_SLOT_BIN: &'static str = "last_slot.bin"; - impl SealedIO for LastSlotSeal { + impl StaticSealedIO for LastSlotSeal { type Error = ConsensusError; type Unsealed = Slot; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { let _ = FILE_LOCK.read().map_err(|e| Self::Error::Other(format!("{:?}", e).into()))?; match unseal(LAST_SLOT_BIN) { @@ -168,7 +168,7 @@ pub mod sgx { } } - fn seal(unsealed: Self::Unsealed) -> Result<(), Self::Error> { + fn seal_to_static_file(unsealed: Self::Unsealed) -> Result<(), Self::Error> { let _ = FILE_LOCK.write().map_err(|e| Self::Error::Other(format!("{:?}", e).into()))?; Ok(unsealed.using_encoded(|bytes| seal(bytes, LAST_SLOT_BIN))?) 
} @@ -179,46 +179,44 @@ pub mod sgx { mod tests { use super::*; use core::assert_matches::assert_matches; - use itp_sgx_io::SealedIO; + use itp_sgx_io::StaticSealedIO; use itp_types::{Block as ParentchainBlock, Header as ParentchainHeader}; use its_primitives::{ traits::{Block as BlockT, SignBlock}, types::block::{Block, SignedBlock}, }; - use sp_keyring::ed25519::{ed25519, Keyring}; - use sp_runtime::{testing::H256, traits::Header as HeaderT}; + use its_test::{ + sidechain_block_data_builder::SidechainBlockDataBuilder, + sidechain_header_builder::SidechainHeaderBuilder, + }; + use sp_keyring::ed25519::Keyring; + use sp_runtime::traits::Header as HeaderT; use std::{fmt::Debug, time::SystemTime}; const SLOT_DURATION: Duration = Duration::from_millis(1000); struct LastSlotSealMock; - impl SealedIO for LastSlotSealMock { + impl StaticSealedIO for LastSlotSealMock { type Error = ConsensusError; type Unsealed = Slot; - fn unseal() -> Result { + fn unseal_from_static_file() -> Result { Ok(slot_from_time_stamp_and_duration(duration_now(), SLOT_DURATION)) } - fn seal(_unsealed: Self::Unsealed) -> Result<(), Self::Error> { + fn seal_to_static_file(_unsealed: Self::Unsealed) -> Result<(), Self::Error> { println!("Seal method stub called."); Ok(()) } } fn test_block_with_time_stamp(timestamp: u64) -> SignedBlock { - Block::new( - ed25519::Public([0; 32]), - 0, - H256::random(), - H256::random(), - H256::random(), - Default::default(), - Default::default(), - timestamp, - ) - .sign_block(&Keyring::Alice.pair()) + let header = SidechainHeaderBuilder::default().build(); + + let block_data = SidechainBlockDataBuilder::default().with_timestamp(timestamp).build(); + + Block::new(header, block_data).sign_block(&Keyring::Alice.pair()) } fn slot(slot: u64) -> SlotInfo { diff --git a/sidechain/primitives/Cargo.toml b/sidechain/primitives/Cargo.toml index d28c504b7f..fd0fcca572 100644 --- a/sidechain/primitives/Cargo.toml +++ b/sidechain/primitives/Cargo.toml @@ -5,13 +5,13 @@ authors = ["Integritee AG "] edition = "2018" [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive", "full"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive", "full"] } serde = { version = "1.0", optional = true, features = ["derive"]} # substrate deps -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [features] default = ["std"] diff --git a/sidechain/primitives/src/traits/mod.rs b/sidechain/primitives/src/traits/mod.rs index 8131cb525c..7c3a4575be 100644 --- a/sidechain/primitives/src/traits/mod.rs +++ b/sidechain/primitives/src/traits/mod.rs @@ -26,52 +26,88 @@ use sp_core::{blake2_256, Pair, Public, H256}; use sp_runtime::traits::Member; use 
sp_std::{fmt::Debug, prelude::*}; -/// Abstraction around a sidechain block. -/// Todo: Make more generic. -pub trait Block: Encode + Decode + Send + Sync + Debug + Clone { - /// Identifier for the shards +pub trait Header: Encode + Decode + Clone { + /// Identifier for the shards. type ShardIdentifier: Encode + Decode + Hash + Copy + Member; - /// Public key type of the block author - type Public: Public; - - /// get the block number + /// Get block number. fn block_number(&self) -> u64; /// get parent hash of block fn parent_hash(&self) -> H256; + /// get shard id of block + fn shard_id(&self) -> Self::ShardIdentifier; + /// get hash of the block's payload + fn block_data_hash(&self) -> H256; + + /// get the `blake2_256` hash of the header. + fn hash(&self) -> H256 { + self.using_encoded(blake2_256).into() + } + + fn new( + block_number: u64, + parent_hash: H256, + shard: Self::ShardIdentifier, + block_data_hash: H256, + ) -> Self; +} + +pub trait BlockData: Encode + Decode + Send + Sync + Debug + Clone { + /// Public key type of the block author + type Public: Public; + /// get timestamp of block fn timestamp(&self) -> u64; /// get layer one head of block fn layer_one_head(&self) -> H256; - /// get shard id of block - fn shard_id(&self) -> Self::ShardIdentifier; /// get author of block fn block_author(&self) -> &Self::Public; /// get reference of extrinsics of block fn signed_top_hashes(&self) -> &[H256]; /// get encrypted payload - fn state_payload(&self) -> &Vec; + fn encrypted_state_diff(&self) -> &Vec; /// get the `blake2_256` hash of the block fn hash(&self) -> H256 { self.using_encoded(blake2_256).into() } - /// Todo: group arguments in structs -> Header - #[allow(clippy::too_many_arguments)] + fn new( author: Self::Public, - block_number: u64, - parent_hash: H256, layer_one_head: H256, - shard: Self::ShardIdentifier, signed_top_hashes: Vec, encrypted_payload: Vec, timestamp: u64, ) -> Self; } +/// Abstraction around a sidechain block. +pub trait Block: Encode + Decode + Send + Sync + Debug + Clone { + /// Sidechain block header type. + type HeaderType: Header; + + /// Sidechain block data type. + type BlockDataType: BlockData; + + /// Public key type of the block author + type Public: Public; + + /// get the `blake2_256` hash of the block + fn hash(&self) -> H256 { + self.header().hash() + } + + /// Get header of the block. + fn header(&self) -> &Self::HeaderType; + + /// Get header of the block. + fn block_data(&self) -> &Self::BlockDataType; + + fn new(header: Self::HeaderType, block_data: Self::BlockDataType) -> Self; +} + /// ShardIdentifier for a [`SignedBlock`] pub type ShardIdentifierFor = - <::Block as Block>::ShardIdentifier; +<<::Block as Block>::HeaderType as Header>::ShardIdentifier; /// A block and it's corresponding signature by the [`Block`] author. 
pub trait SignedBlock: Encode + Decode + Send + Sync + Debug + Clone { diff --git a/sidechain/primitives/src/types/block.rs b/sidechain/primitives/src/types/block.rs index 52a0612d61..c4c2420a5d 100644 --- a/sidechain/primitives/src/types/block.rs +++ b/sidechain/primitives/src/types/block.rs @@ -15,11 +15,13 @@ */ -use crate::traits::{Block as BlockT, SignedBlock as SignedBlockT}; +use crate::{ + traits::{Block as BlockTrait, SignedBlock as SignedBlockTrait}, + types::{block_data::BlockData, header::Header}, +}; use codec::{Decode, Encode}; use sp_core::{ed25519, H256}; use sp_runtime::{traits::Verify, MultiSignature}; -use sp_std::vec::Vec; pub type BlockHash = H256; pub type BlockNumber = u64; @@ -40,93 +42,44 @@ pub type Signature = MultiSignature; #[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct SignedBlock { + /// Plain sidechain block without author signature. pub block: Block, - /// block author signature + /// Block author signature. pub signature: Signature, } -/// simplified block structure for relay chain submission as an extrinsic +/// Simplified block structure for relay chain submission as an extrinsic. #[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Block { - pub block_number: BlockNumber, - pub parent_hash: H256, - pub timestamp: u64, - /// Parentchain header this block is based on - pub layer_one_head: H256, - pub shard_id: ShardIdentifier, - /// must be registered on layer one as an enclave for the respective shard - pub block_author: ed25519::Public, - pub signed_top_hashes: Vec, - // encrypted state payload - pub state_payload: Vec, + /// Sidechain Header + pub header: Header, + + /// Sidechain Block data + pub block_data: BlockData, } -impl BlockT for Block { - type ShardIdentifier = H256; +impl BlockTrait for Block { + type HeaderType = Header; + + type BlockDataType = BlockData; type Public = ed25519::Public; - ///get block number - fn block_number(&self) -> BlockNumber { - self.block_number - } - /// get parent hash of block - fn parent_hash(&self) -> H256 { - self.parent_hash - } - /// get timestamp of block - fn timestamp(&self) -> Timestamp { - self.timestamp - } - /// get layer one head of block - fn layer_one_head(&self) -> H256 { - self.layer_one_head - } - /// get shard id of block - fn shard_id(&self) -> Self::ShardIdentifier { - self.shard_id - } - /// get author of block - fn block_author(&self) -> &Self::Public { - &self.block_author + fn header(&self) -> &Self::HeaderType { + &self.header } - /// get reference of extrinisics of block - fn signed_top_hashes(&self) -> &[H256] { - &self.signed_top_hashes - } - /// get encrypted payload - fn state_payload(&self) -> &Vec { - &self.state_payload + + fn block_data(&self) -> &Self::BlockDataType { + &self.block_data } - /// Constructs an unsigned block - /// Todo: group arguments in structs. 
- #[allow(clippy::too_many_arguments)] - fn new( - author: Self::Public, - block_number: u64, - parent_hash: H256, - layer_one_head: H256, - shard: Self::ShardIdentifier, - signed_top_hashes: Vec, - encrypted_payload: Vec, - timestamp: Timestamp, - ) -> Block { - // create block - Block { - block_number, - parent_hash, - timestamp, - layer_one_head, - signed_top_hashes, - shard_id: shard, - block_author: author, - state_payload: encrypted_payload, - } + + fn new(header: Self::HeaderType, block_data: Self::BlockDataType) -> Self { + Self { header, block_data } } } -impl SignedBlockT for SignedBlock { +impl SignedBlockTrait for SignedBlock { type Block = Block; type Public = ed25519::Public; @@ -149,15 +102,16 @@ impl SignedBlockT for SignedBlock { /// Verifies the signature of a Block fn verify_signature(&self) -> bool { - self.block - .using_encoded(|p| self.signature.verify(p, &self.block.block_author.into())) + self.block.using_encoded(|p| { + self.signature.verify(p, &self.block.block_data().block_author.into()) + }) } } #[cfg(test)] mod tests { use super::*; - use crate::traits::{Block as BlockT, SignBlock}; + use crate::traits::{Block as BlockT, BlockData, Header, SignBlock}; use sp_core::Pair; use std::time::{SystemTime, UNIX_EPOCH}; @@ -167,16 +121,16 @@ mod tests { } fn test_block() -> Block { - Block::new( + let header = Header::new(0, H256::random(), H256::random(), Default::default()); + let block_data = BlockData::new( ed25519::Pair::from_string("//Alice", None).unwrap().public().into(), - 0, - H256::random(), - H256::random(), H256::random(), Default::default(), Default::default(), timestamp_now(), - ) + ); + + Block::new(header, block_data) } #[test] @@ -198,7 +152,7 @@ mod tests { let signer = ed25519::Pair::from_string("//Alice", None).unwrap(); let mut signed_block: SignedBlock = test_block().sign_block(&signer); - signed_block.block.block_number = 1; + signed_block.block.header.block_number = 1; assert!(!signed_block.verify_signature()); } diff --git a/sidechain/primitives/src/types/block_data.rs b/sidechain/primitives/src/types/block_data.rs new file mode 100644 index 0000000000..a48d4148e4 --- /dev/null +++ b/sidechain/primitives/src/types/block_data.rs @@ -0,0 +1,82 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::traits::BlockData as BlockDataTrait; +use codec::{Decode, Encode}; +use sp_core::{ed25519, H256}; +use sp_std::vec::Vec; + +pub type Timestamp = u64; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct BlockData { + pub timestamp: u64, + /// Parentchain header this block is based on. + pub layer_one_head: H256, + /// Must be registered on layer one as an enclave for the respective shard. + pub block_author: ed25519::Public, + /// Hashes of signed trusted operations. + pub signed_top_hashes: Vec, + /// Encrypted state payload. 
+ pub encrypted_state_diff: Vec, +} + +impl BlockDataTrait for BlockData { + type Public = ed25519::Public; + + /// Get timestamp of block. + fn timestamp(&self) -> Timestamp { + self.timestamp + } + /// Get layer one head of block. + fn layer_one_head(&self) -> H256 { + self.layer_one_head + } + /// Get author of block. + fn block_author(&self) -> &Self::Public { + &self.block_author + } + /// Get reference of extrinisics of block. + fn signed_top_hashes(&self) -> &[H256] { + &self.signed_top_hashes + } + /// Get encrypted payload. + fn encrypted_state_diff(&self) -> &Vec { + &self.encrypted_state_diff + } + /// Constructs block data. + fn new( + block_author: Self::Public, + layer_one_head: H256, + signed_top_hashes: Vec, + encrypted_state_diff: Vec, + timestamp: Timestamp, + ) -> BlockData { + // create block + BlockData { + timestamp, + layer_one_head, + signed_top_hashes, + block_author, + encrypted_state_diff, + } + } +} diff --git a/sidechain/primitives/src/types/header.rs b/sidechain/primitives/src/types/header.rs new file mode 100644 index 0000000000..4689ddb7ed --- /dev/null +++ b/sidechain/primitives/src/types/header.rs @@ -0,0 +1,67 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +use crate::traits::Header as HeaderTrait; +use codec::{Decode, Encode}; +use sp_core::H256; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +pub type ShardIdentifier = H256; + +#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug, Copy)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct Header { + /// The parent hash. + pub parent_hash: H256, + + /// The block number. + pub block_number: u64, + + /// The Shard id. + pub shard_id: ShardIdentifier, + + /// The payload hash. 
+ pub block_data_hash: H256, +} + +impl HeaderTrait for Header { + type ShardIdentifier = H256; + + fn block_number(&self) -> u64 { + self.block_number + } + fn parent_hash(&self) -> H256 { + self.parent_hash + } + fn shard_id(&self) -> Self::ShardIdentifier { + self.shard_id + } + fn block_data_hash(&self) -> H256 { + self.block_data_hash + } + + fn new( + block_number: u64, + parent_hash: H256, + shard: Self::ShardIdentifier, + block_data_hash: H256, + ) -> Header { + Header { block_number, parent_hash, shard_id: shard, block_data_hash } + } +} diff --git a/sidechain/primitives/src/types/mod.rs b/sidechain/primitives/src/types/mod.rs index e080023224..2056953387 100644 --- a/sidechain/primitives/src/types/mod.rs +++ b/sidechain/primitives/src/types/mod.rs @@ -16,5 +16,7 @@ */ pub mod block; +pub mod block_data; +pub mod header; pub use block::*; diff --git a/sidechain/rpc-handler/Cargo.toml b/sidechain/rpc-handler/Cargo.toml index 411800eb77..b332d904cc 100644 --- a/sidechain/rpc-handler/Cargo.toml +++ b/sidechain/rpc-handler/Cargo.toml @@ -9,17 +9,17 @@ resolver = "2" [features] default = ["std"] std = [ + "itp-top-pool-author/std", "itp-types/std", "its-primitives/std", - "its-top-pool-rpc-author/std", "jsonrpc-core", "log/std", "rust-base58", ] sgx = [ "sgx_tstd", + "itp-top-pool-author/sgx", "itp-types/sgx", - "its-top-pool-rpc-author/sgx", "jsonrpc-core_sgx", "rust-base58_sgx", ] @@ -30,9 +30,9 @@ sgx_types = { branch = "master", git = "https://github.com/apache/teaclave-sgx-s sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } # local dependencies +itp-top-pool-author = { path = "../../core-primitives/top-pool-author", default-features = false } itp-types = { path = "../../core-primitives/types", default-features = false } its-primitives = { path = "../primitives", default-features = false } -its-top-pool-rpc-author = { path = "../top-pool-rpc-author", default-features = false } # sgx enabled external libraries rust-base58_sgx = { package = "rust-base58", rev = "sgx_1.1.3", git = "https://github.com/mesalock-linux/rust-base58-sgx", optional = true, default-features = false, features = ["mesalock_sgx"] } @@ -43,6 +43,6 @@ rust-base58 = { package = "rust-base58", version = "0.0.4", optional = true } jsonrpc-core = { version = "18", optional = true } # no-std compatible libraries -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } diff --git a/sidechain/rpc-handler/src/direct_top_pool_api.rs b/sidechain/rpc-handler/src/direct_top_pool_api.rs index 420f146cfe..44dd859fa3 100644 --- a/sidechain/rpc-handler/src/direct_top_pool_api.rs +++ b/sidechain/rpc-handler/src/direct_top_pool_api.rs @@ -25,10 +25,10 @@ use rust_base58::base58::FromBase58; use base58::FromBase58; use codec::{Decode, Encode}; +use itp_top_pool_author::traits::AuthorApi; use itp_types::{ DirectRequestStatus, Request, RpcReturnValue, ShardIdentifier, TrustedOperationStatus, }; -use its_top_pool_rpc_author::traits::AuthorApi; 
use jsonrpc_core::{ futures::executor, serde_json::json, Error as RpcError, IoHandler, Params, Value, }; @@ -37,7 +37,7 @@ use std::{borrow::ToOwned, format, string::String, sync::Arc, vec, vec::Vec}; type Hash = sp_core::H256; pub fn add_top_pool_direct_rpc_methods( - rpc_author: Arc, + top_pool_author: Arc, mut io_handler: IoHandler, ) -> IoHandler where @@ -45,7 +45,7 @@ where { // author_submitAndWatchExtrinsic let author_submit_and_watch_extrinsic_name: &str = "author_submitAndWatchExtrinsic"; - let submit_watch_author = rpc_author.clone(); + let submit_watch_author = top_pool_author.clone(); io_handler.add_sync_method(author_submit_and_watch_extrinsic_name, move |params: Params| { match params.parse::>() { Ok(encoded_params) => match Request::decode(&mut encoded_params.as_slice()) { @@ -80,7 +80,7 @@ where // author_submitExtrinsic let author_submit_extrinsic_name: &str = "author_submitExtrinsic"; - let submit_author = rpc_author.clone(); + let submit_author = top_pool_author.clone(); io_handler.add_sync_method(author_submit_extrinsic_name, move |params: Params| { match params.parse::>() { Ok(encoded_params) => match Request::decode(&mut encoded_params.as_slice()) { @@ -115,7 +115,7 @@ where // author_pendingExtrinsics let author_pending_extrinsic_name: &str = "author_pendingExtrinsics"; - let pending_author = rpc_author; + let pending_author = top_pool_author; io_handler.add_sync_method(author_pending_extrinsic_name, move |params: Params| { match params.parse::>() { Ok(shards) => { diff --git a/sidechain/rpc-handler/src/import_block_api.rs b/sidechain/rpc-handler/src/import_block_api.rs index 9e375e74e8..2c4e7d50e6 100644 --- a/sidechain/rpc-handler/src/import_block_api.rs +++ b/sidechain/rpc-handler/src/import_block_api.rs @@ -80,7 +80,7 @@ pub mod tests { #[test] pub fn sidechain_import_block_is_ok() { let io = io_handler(); - let enclave_req = r#"{"jsonrpc":"2.0","method":"sidechain_importBlock","params":[4,0,0,0,0,0,0,0,0,228,0,145,188,97,251,138,131,108,29,6,107,10,152,67,29,148,190,114,167,223,169,197,163,93,228,76,169,171,80,15,209,101,11,211,96,0,0,0,0,83,52,167,255,37,229,185,231,38,66,122,3,55,139,5,190,125,85,94,177,190,99,22,149,92,97,154,30,142,89,24,144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,136,220,52,23,213,5,142,196,180,80,62,12,18,234,26,10,137,190,32,15,233,137,34,66,61,67,52,1,79,166,176,238,0,0,0,175,124,84,84,32,238,162,224,130,203,26,66,7,121,44,59,196,200,100,31,173,226,165,106,187,135,223,149,30,46,191,95,116,203,205,102,100,85,82,74,158,197,166,218,181,130,119,127,162,134,227,129,118,85,123,76,21,113,90,1,160,77,110,15],"id":1}"#; + let enclave_req = 
r#"{"jsonrpc":"2.0","method":"sidechain_importBlock","params":[4,214,133,100,93,246,175,226,132,221,148,110,92,136,126,102,210,76,31,161,70,134,211,78,164,161,10,161,244,238,152,16,78,27,0,0,0,0,0,0,0,73,116,2,132,31,80,15,47,213,32,2,41,26,189,99,86,235,175,172,213,177,240,105,72,195,176,9,208,70,199,175,14,111,195,7,63,174,8,7,34,199,161,58,166,190,254,107,27,93,9,249,239,10,56,116,174,164,10,185,247,206,95,80,15,200,62,48,254,127,1,0,0,34,46,253,86,156,211,88,25,46,128,216,4,146,5,112,16,125,206,126,34,116,34,61,177,72,2,249,236,225,168,199,27,6,165,15,16,239,22,74,141,152,34,57,191,166,40,181,130,194,202,238,12,244,63,191,131,115,91,89,117,94,49,0,245,0,93,2,38,58,201,204,121,130,251,179,198,127,37,203,37,1,198,62,200,247,175,68,5,83,206,13,146,229,140,187,255,177,22,84,184,68,7,65,168,141,97,207,86,192,169,203,166,205,126,227,238,1,142,81,214,23,245,197,242,73,40,30,103,235,174,202,80,142,248,57,25,98,190,36,106,6,177,153,229,124,145,253,136,80,0,132,19,152,227,134,116,244,125,45,200,160,89,219,24,68,47,239,41,164,159,103,60,111,21,180,251,193,8,188,180,207,212,10,244,187,140,49,161,46,103,199,71,191,207,167,59,98,108,149,234,93,222,228,176,249,126,130,54,226,112,233,235,92,214,100,228,91,55,0,196,187,51,131,66,61,113,185,228,191,189,214,210,69,169,201,223,67,81,250,108,128,172,119,239,90,189,173,189,174,140,196,44,236,75,84,83,37,148,210,128,196,228,214,165,43,92,135,31,170,200,130,176,253,255,62,23,164,183,122,98,163,28,7],"id":1}"#; let response_string = io.handle_request_sync(enclave_req).unwrap(); diff --git a/sidechain/sidechain-crate/Cargo.toml b/sidechain/sidechain-crate/Cargo.toml index 9c223028cb..558286c63b 100644 --- a/sidechain/sidechain-crate/Cargo.toml +++ b/sidechain/sidechain-crate/Cargo.toml @@ -14,9 +14,7 @@ std = [ "its-primitives/std", "its-rpc-handler/std", "its-state/std", - "its-top-pool/std", "its-top-pool-executor/std", - "its-top-pool-rpc-author/std", "its-validateer-fetch/std", ] sgx = [ @@ -26,11 +24,8 @@ sgx = [ "its-consensus-slots/sgx", "its-rpc-handler/sgx", "its-state/sgx", - "its-top-pool/sgx", "its-top-pool-executor/sgx", - "its-top-pool-rpc-author/sgx", ] -test = ["its-top-pool-rpc-author/test"] [dependencies] its-block-composer = { path = "../block-composer", default-features = false } @@ -40,7 +35,5 @@ its-consensus-slots = { path = "../consensus/slots", default-features = false } its-primitives = { path = "../primitives", default-features = false } its-rpc-handler = { path = "../rpc-handler", default-features = false } its-state = { path = "../state", default-features = false } -its-top-pool = { path = "../top-pool", default-features = false } its-top-pool-executor = { path = "../top-pool-executor", default-features = false } -its-top-pool-rpc-author = { path = "../top-pool-rpc-author", default-features = false } its-validateer-fetch = { path = "../validateer-fetch", default-features = false } diff --git a/sidechain/sidechain-crate/src/lib.rs b/sidechain/sidechain-crate/src/lib.rs index 28c1d35f5b..7bc7254152 100644 --- a/sidechain/sidechain-crate/src/lib.rs +++ b/sidechain/sidechain-crate/src/lib.rs @@ -36,10 +36,6 @@ pub use its_rpc_handler as rpc_handler; pub use its_state as state; -pub use its_top_pool as top_pool; - pub use its_top_pool_executor as top_pool_executor; -pub use its_top_pool_rpc_author as top_pool_rpc_author; - pub use its_validateer_fetch as validateer_fetch; diff --git a/sidechain/state/Cargo.toml b/sidechain/state/Cargo.toml index fdf502573c..0a53c32ce7 100644 --- a/sidechain/state/Cargo.toml +++ 
b/sidechain/state/Cargo.toml @@ -44,8 +44,8 @@ sgx = [ ] [dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive", "chain-error"] } -frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive", "chain-error"] } +frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} log = { version = "0.4", default-features = false } serde = { version = "1.0", default-features = false, features = ["alloc", "derive"] } @@ -67,9 +67,9 @@ sgx-externalities = { default-features = false, git = "https://github.com/ajuna- sp-io = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master", features = ["disable_oom", "disable_panic_handler", "disable_allocator"], optional = true } # substrate deps -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # test deps [dev-dependencies] -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} diff --git a/sidechain/storage/Cargo.toml b/sidechain/storage/Cargo.toml index 4ce728f17b..afe4e06c35 100644 --- a/sidechain/storage/Cargo.toml +++ b/sidechain/storage/Cargo.toml @@ -6,7 +6,7 @@ edition = "2018" [dependencies] # crate.io -codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", features = ["derive"] } log = "0.4" parking_lot = "0.11.1" rocksdb = "0.17.0" @@ -16,7 +16,7 @@ thiserror = "1.0" its-primitives = { path = "../primitives" } # Substrate dependencies -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master" } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } [dev-dependencies] # crate.io @@ -25,6 +25,7 @@ temp-dir = "0.1" # local itp-types = { path = "../../core-primitives/types" } its-test = { path = "../test" } +itp-time-utils = { path = "../../core-primitives/time-utils", default-features = false } [features] mocks = [] diff --git a/sidechain/storage/src/storage.rs b/sidechain/storage/src/storage.rs index 899a6983e7..a565bbc348 100644 --- a/sidechain/storage/src/storage.rs +++ b/sidechain/storage/src/storage.rs @@ -18,7 +18,7 @@ use super::{db::SidechainDB, Error, Result}; use codec::{Decode, Encode}; use its_primitives::{ - traits::{Block as BlockT, SignedBlock as SignedBlockT}, + traits::{Block as BlockTrait, Header as HeaderTrait, SignedBlock as SignedBlockT}, types::{BlockHash, BlockNumber}, }; use log::*; @@ -31,7 +31,9 @@ const LAST_BLOCK_KEY: &[u8] = b"last_sidechainblock"; 
const STORED_SHARDS_KEY: &[u8] = b"stored_shards"; /// ShardIdentifier type -type ShardIdentifierFor = <::Block as BlockT>::ShardIdentifier; +type ShardIdentifierFor = + <<::Block as BlockTrait>::HeaderType as HeaderTrait>::ShardIdentifier; + /// Helper struct, contains the blocknumber /// and blockhash of the last sidechain block #[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug, Default)] @@ -125,7 +127,7 @@ impl SidechainStorage { let mut current_block = latest_block; let mut blocks_to_return = Vec::::new(); while ¤t_block.hash() != block_hash { - let parent_block_hash = current_block.block().parent_hash(); + let parent_block_hash = current_block.block().header().parent_hash(); blocks_to_return.push(current_block); @@ -233,7 +235,7 @@ impl SidechainStorage { new_shard: &mut bool, batch: &mut WriteBatch, ) -> Result<()> { - let shard = &signed_block.block().shard_id(); + let shard = &signed_block.block().header().shard_id(); if self.shards.contains(shard) { if !self.verify_block_ancestry(signed_block.block()) { // Do not include block if its not a direct ancestor of the last block in line. @@ -249,8 +251,8 @@ impl SidechainStorage { } fn verify_block_ancestry(&self, block: &::Block) -> bool { - let shard = &block.shard_id(); - let current_block_nr = block.block_number(); + let shard = &block.header().shard_id(); + let current_block_nr = block.header().block_number(); if let Some(last_block) = self.last_block_of_shard(shard) { if last_block.number != current_block_nr - 1 { error!("[Sidechain DB] Sidechainblock (nr: {:?}) is not a succession of the previous block (nr: {:?}) in shard: {:?}", @@ -307,8 +309,8 @@ impl SidechainStorage { /// Adds the block to the WriteBatch. fn add_last_block(&mut self, batch: &mut WriteBatch, block: &SignedBlock) { let hash = block.hash(); - let block_number = block.block().block_number(); - let shard = block.block().shard_id(); + let block_number = block.block().header().block_number(); + let shard = block.block().header().shard_id(); // Block hash -> Signed Block. 
SidechainDB::add_to_batch(batch, hash, block); @@ -370,10 +372,7 @@ mod test { create_signed_block_with_shard as create_signed_block, create_temp_dir, get_storage, }; use itp_types::ShardIdentifier; - use its_primitives::{ - traits::{Block as BlockT, SignedBlock as SignedBlockT}, - types::SignedBlock, - }; + use its_primitives::{traits::SignedBlock as SignedBlockT, types::SignedBlock}; use sp_core::H256; #[test] @@ -409,7 +408,7 @@ mod test { let signed_block = create_signed_block(20, shard); let signed_last_block = LastSidechainBlock { hash: signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; // when { @@ -438,7 +437,7 @@ mod test { let signed_block = create_signed_block(20, shard); let signed_last_block = LastSidechainBlock { hash: signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; { let mut sidechain_db = get_storage(temp_dir.path().to_path_buf()); @@ -472,7 +471,7 @@ mod test { // ensure DB contains previously stored data: let last_block = sidechain_db.last_block_of_shard(&shard).unwrap(); - assert_eq!(last_block.number, signed_block.block().block_number()); + assert_eq!(last_block.number, signed_block.block().header().block_number()); assert_eq!(last_block.hash, signed_block.hash()); let stored_block_hash = sidechain_db.get_block_hash(&shard, last_block.number).unwrap().unwrap(); @@ -492,14 +491,14 @@ mod test { sidechain_db.db.put(signed_block.hash(), signed_block.clone()).unwrap(); sidechain_db .db - .put((shard, signed_block.block().block_number()), signed_block.hash()) + .put((shard, signed_block.block().header().block_number()), signed_block.hash()) .unwrap(); assert_eq!( sidechain_db .db .get::<(ShardIdentifier, BlockNumber), H256>(( shard, - signed_block.block().block_number() + signed_block.block().header().block_number() )) .unwrap() .unwrap(), @@ -515,7 +514,7 @@ mod test { sidechain_db.delete_block( &mut batch, &signed_block.hash(), - &signed_block.block().block_number(), + &signed_block.block().header().block_number(), &shard, ); sidechain_db.db.write(batch).unwrap(); @@ -529,7 +528,7 @@ mod test { .db .get::<(ShardIdentifier, BlockNumber), H256>(( shard, - signed_block.block().block_number() + signed_block.block().header().block_number() )) .unwrap() .is_none()); @@ -548,7 +547,7 @@ mod test { let signed_block = create_signed_block(8, shard); let last_block = LastSidechainBlock { hash: signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; { // fill db @@ -556,7 +555,7 @@ mod test { sidechain_db.db.put(signed_block.hash(), signed_block.clone()).unwrap(); sidechain_db .db - .put((shard, signed_block.block().block_number()), signed_block.hash()) + .put((shard, signed_block.block().header().block_number()), signed_block.hash()) .unwrap(); sidechain_db.db.put((LAST_BLOCK_KEY, shard), last_block.clone()).unwrap(); assert_eq!( @@ -564,7 +563,7 @@ mod test { .db .get::<(ShardIdentifier, BlockNumber), H256>(( shard, - signed_block.block().block_number() + signed_block.block().header().block_number() )) .unwrap() .unwrap(), @@ -594,7 +593,7 @@ mod test { .db .get::<(ShardIdentifier, BlockNumber), H256>(( shard, - signed_block.block().block_number() + signed_block.block().header().block_number() )) .unwrap() .is_none()); @@ -618,7 +617,7 @@ mod test { let signed_block = create_signed_block(8, shard); let last_block = LastSidechainBlock { hash: 
signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; let signed_block_two = create_signed_block(9, shard); { @@ -640,7 +639,7 @@ mod test { let signed_block = create_signed_block(8, shard); let last_block = LastSidechainBlock { hash: signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; let signed_block_two = create_signed_block(5, shard); { @@ -712,7 +711,7 @@ mod test { let signed_block = create_signed_block(8, shard); let last_block = LastSidechainBlock { hash: signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; let signed_block_two = create_signed_block(9, shard); let mut new_shard = false; @@ -739,7 +738,7 @@ mod test { let signed_block = create_signed_block(8, shard); let last_block = LastSidechainBlock { hash: signed_block.hash(), - number: signed_block.block().block_number(), + number: signed_block.block().header().block_number(), }; let signed_block_two = create_signed_block(10, shard); let mut new_shard = false; @@ -778,7 +777,7 @@ mod test { // ensure DB contains previously stored data: assert_eq!(*updated_sidechain_db.shards(), vec![shard]); let last_block = updated_sidechain_db.last_block_of_shard(&shard).unwrap(); - assert_eq!(last_block.number, signed_block.block().block_number()); + assert_eq!(last_block.number, signed_block.block().header().block_number()); assert_eq!(last_block.hash, signed_block.hash()); let stored_block_hash = updated_sidechain_db.get_block_hash(&shard, last_block.number).unwrap().unwrap(); @@ -892,16 +891,16 @@ mod test { // last block is equal to first block: let last_block: &LastSidechainBlock = updated_sidechain_db.last_blocks.get(&shard).unwrap(); - assert_eq!(last_block.number, signed_block_one.block().block_number()); + assert_eq!(last_block.number, signed_block_one.block().header().block_number()); // storage contains only one blocks: // (shard,blocknumber) -> blockhash let db_block_hash_one = updated_sidechain_db - .get_block_hash(&shard, signed_block_one.block().block_number()) + .get_block_hash(&shard, signed_block_one.block().header().block_number()) .unwrap() .unwrap(); let db_block_hash_empty = updated_sidechain_db - .get_block_hash(&shard, signed_block_two.block().block_number()) + .get_block_hash(&shard, signed_block_two.block().header().block_number()) .unwrap(); assert!(db_block_hash_empty.is_none()); assert_eq!(db_block_hash_one, signed_block_one.hash()); @@ -920,12 +919,12 @@ mod test { // create last block one for comparison let last_block = LastSidechainBlock { hash: signed_block_one.hash(), - number: signed_block_one.block().block_number(), + number: signed_block_one.block().header().block_number(), }; // then let some_block = sidechain_db - .get_previous_block(&shard, signed_block_one.block().block_number() + 1) + .get_previous_block(&shard, signed_block_one.block().header().block_number() + 1) .unwrap() .unwrap(); @@ -994,7 +993,7 @@ mod test { let block_three = create_signed_block(3, shard); let last_block = LastSidechainBlock { hash: block_three.hash(), - number: block_three.block().block_number(), + number: block_three.block().header().block_number(), }; { @@ -1077,7 +1076,7 @@ mod test { let block_three = create_signed_block(3, shard_one); let last_block_one = LastSidechainBlock { hash: block_three.hash(), - number: block_three.block().block_number(), + number: block_three.block().header().block_number(), 
}; // shard two let shard_two = H256::from_low_u64_be(2); @@ -1087,7 +1086,7 @@ mod test { let block_four_s = create_signed_block(4, shard_two); let last_block_two = LastSidechainBlock { hash: block_four_s.hash(), - number: block_four_s.block().block_number(), + number: block_four_s.block().header().block_number(), }; { // create sidechain_db diff --git a/sidechain/storage/src/test_utils.rs b/sidechain/storage/src/test_utils.rs index 6125e9bc1d..c5c1d694d0 100644 --- a/sidechain/storage/src/test_utils.rs +++ b/sidechain/storage/src/test_utils.rs @@ -16,15 +16,16 @@ */ use crate::storage::SidechainStorage; +use itp_time_utils::now_as_u64; use itp_types::ShardIdentifier; use its_primitives::types::{BlockHash, SignedBlock as SignedSidechainBlock}; -use its_test::sidechain_block_builder::SidechainBlockBuilder; -use sp_core::{crypto::Pair, ed25519, H256}; -use std::{ - path::PathBuf, - time::{SystemTime, UNIX_EPOCH}, - vec::Vec, +use its_test::{ + sidechain_block_builder::SidechainBlockBuilder, + sidechain_block_data_builder::SidechainBlockDataBuilder, + sidechain_header_builder::SidechainHeaderBuilder, }; +use sp_core::{crypto::Pair, ed25519, H256}; +use std::{path::PathBuf, vec::Vec}; use temp_dir::TempDir; pub fn fill_storage_with_blocks(blocks: Vec) -> TempDir { @@ -50,9 +51,16 @@ pub fn create_signed_block_with_parenthash( block_number: u64, parent_hash: BlockHash, ) -> SignedSidechainBlock { - default_block_builder() + let header = default_header_builder() .with_parent_hash(parent_hash) - .with_number(block_number) + .with_block_number(block_number) + .build(); + + let block_data = default_block_data_builder().build(); + + SidechainBlockBuilder::default() + .with_header(header) + .with_block_data(block_data) .build_signed() } @@ -60,18 +68,29 @@ pub fn create_signed_block_with_shard( block_number: u64, shard: ShardIdentifier, ) -> SignedSidechainBlock { - default_block_builder() + let header = default_header_builder() .with_shard(shard) - .with_number(block_number) + .with_block_number(block_number) + .build(); + + let block_data = default_block_data_builder().build(); + + SidechainBlockBuilder::default() + .with_header(header) + .with_block_data(block_data) .build_signed() } -fn default_block_builder() -> SidechainBlockBuilder { - SidechainBlockBuilder::default() - .with_signer(ed25519::Pair::from_string("//Alice", None).unwrap()) +fn default_header_builder() -> SidechainHeaderBuilder { + SidechainHeaderBuilder::default() .with_parent_hash(H256::random()) - .with_parentchain_block_hash(H256::random()) - .with_timestamp(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64) + .with_block_number(Default::default()) .with_shard(default_shard()) - .with_number(Default::default()) +} + +fn default_block_data_builder() -> SidechainBlockDataBuilder { + SidechainBlockDataBuilder::default() + .with_timestamp(now_as_u64()) + .with_layer_one_head(H256::random()) + .with_signer(ed25519::Pair::from_string("//Alice", None).unwrap()) } diff --git a/sidechain/test/Cargo.toml b/sidechain/test/Cargo.toml index 9ce056ca96..aab610d472 100644 --- a/sidechain/test/Cargo.toml +++ b/sidechain/test/Cargo.toml @@ -5,6 +5,8 @@ authors = ["Integritee AG "] edition = "2018" [dependencies] +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false } + # sgx dependencies sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sdk.git", optional = true } @@ -14,17 +16,20 @@ itp-types = { path = "../../core-primitives/types", default-features = false } 
its-primitives = { path = "../primitives", default-features = false } # Substrate dependencies -sp-core = { version = "5.0.0", git = "https://github.com/paritytech/substrate.git", branch = "master", default_features = false } +sp-core = { version = "6.0.0", git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19", default_features = false } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [features] default = ["std"] std = [ + "codec/std", "itp-time-utils/std", "itp-types/std", "its-primitives/std", # substrate "sp-core/std", + "sp-runtime/std", ] sgx = [ "itp-time-utils/sgx", diff --git a/sidechain/test/src/lib.rs b/sidechain/test/src/lib.rs index 9a3a7b6f62..f38bb989a7 100644 --- a/sidechain/test/src/lib.rs +++ b/sidechain/test/src/lib.rs @@ -21,3 +21,5 @@ compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the sam extern crate sgx_tstd as std; pub mod sidechain_block_builder; +pub mod sidechain_block_data_builder; +pub mod sidechain_header_builder; diff --git a/sidechain/test/src/sidechain_block_builder.rs b/sidechain/test/src/sidechain_block_builder.rs index 3aef49a6b5..bf9901add1 100644 --- a/sidechain/test/src/sidechain_block_builder.rs +++ b/sidechain/test/src/sidechain_block_builder.rs @@ -18,14 +18,13 @@ //! Builder pattern for a signed sidechain block. -use itp_time_utils; -use itp_types::H256; +use crate::{ + sidechain_block_data_builder::SidechainBlockDataBuilder, + sidechain_header_builder::SidechainHeaderBuilder, +}; use its_primitives::{ - traits::{Block as BlockTrait, SignBlock}, - types::{ - block::{BlockHash, BlockNumber, Timestamp}, - Block, ShardIdentifier, SignedBlock, - }, + traits::SignBlock, + types::{block_data::BlockData, header::Header, Block, SignedBlock}, }; use sp_core::{ed25519, Pair}; @@ -34,26 +33,16 @@ const ENCLAVE_SEED: Seed = *b"12345678901234567890123456789012"; pub struct SidechainBlockBuilder { signer: ed25519::Pair, - number: BlockNumber, - parent_hash: BlockHash, - parentchain_block_hash: H256, - signed_top_hashes: Vec, - encrypted_payload: Vec, - shard: ShardIdentifier, - timestamp: Timestamp, + header: Header, + block_data: BlockData, } impl Default for SidechainBlockBuilder { fn default() -> Self { SidechainBlockBuilder { signer: Pair::from_seed(&ENCLAVE_SEED), - number: 1, - parent_hash: BlockHash::default(), - parentchain_block_hash: Default::default(), - signed_top_hashes: Default::default(), - encrypted_payload: Default::default(), - shard: Default::default(), - timestamp: Default::default(), + header: SidechainHeaderBuilder::default().build(), + block_data: SidechainBlockDataBuilder::default().build(), } } } @@ -62,71 +51,33 @@ impl SidechainBlockBuilder { pub fn random() -> Self { SidechainBlockBuilder { signer: Pair::from_seed(&ENCLAVE_SEED), - number: 42, - parent_hash: BlockHash::random(), - parentchain_block_hash: BlockHash::random(), - signed_top_hashes: vec![H256::random(), H256::random()], - encrypted_payload: vec![1, 3, 42, 8, 11, 33], - shard: ShardIdentifier::random(), - timestamp: itp_time_utils::now_as_u64(), + header: SidechainHeaderBuilder::random().build(), + block_data: SidechainBlockDataBuilder::random().build(), } } - pub fn with_signer(mut self, signer: ed25519::Pair) -> Self { - self.signer = signer; - self - } - pub fn with_number(mut self, number: BlockNumber) -> Self { - self.number = number; + pub fn with_header(mut self, header: Header) -> Self { + self.header = header; self } - 
pub fn with_parent_hash(mut self, parent_hash: BlockHash) -> Self { - self.parent_hash = parent_hash; + pub fn with_block_data(mut self, block_data: BlockData) -> Self { + self.block_data = block_data; self } - pub fn with_parentchain_block_hash(mut self, parentchain_block_hash: H256) -> Self { - self.parentchain_block_hash = parentchain_block_hash; - self - } - - pub fn with_signed_top_hashes(mut self, signed_top_hashes: Vec) -> Self { - self.signed_top_hashes = signed_top_hashes; - self - } - - pub fn with_payload(mut self, payload: Vec) -> Self { - self.encrypted_payload = payload; - self - } - - pub fn with_shard(mut self, shard: ShardIdentifier) -> Self { - self.shard = shard; - self - } - - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { - self.timestamp = timestamp; + pub fn with_signer(mut self, signer: ed25519::Pair) -> Self { + self.signer = signer; self } - pub fn build(&self) -> Block { - Block::new( - self.signer.public(), - self.number, - self.parent_hash, - self.parentchain_block_hash, - self.shard, - self.signed_top_hashes.clone(), - self.encrypted_payload.clone(), - self.timestamp, - ) + pub fn build(self) -> Block { + Block { header: self.header, block_data: self.block_data } } - pub fn build_signed(&self) -> SignedBlock { - let signer = &self.signer.clone(); - self.build().sign_block(signer) + pub fn build_signed(self) -> SignedBlock { + let signer = self.signer.clone(); + self.build().sign_block(&signer) } } diff --git a/sidechain/test/src/sidechain_block_data_builder.rs b/sidechain/test/src/sidechain_block_data_builder.rs new file mode 100644 index 0000000000..1681375e9a --- /dev/null +++ b/sidechain/test/src/sidechain_block_data_builder.rs @@ -0,0 +1,97 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + Copyright (C) 2017-2019 Baidu, Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +//! Builder pattern for sidechain block data. 
+ +use itp_time_utils; +use itp_types::H256; +use its_primitives::types::{ + block::{BlockHash, Timestamp}, + block_data::BlockData, +}; +use sp_core::{ed25519, Pair}; + +type Seed = [u8; 32]; +const ENCLAVE_SEED: Seed = *b"12345678901234567890123456789012"; + +pub struct SidechainBlockDataBuilder { + timestamp: Timestamp, + layer_one_head: H256, + signer: ed25519::Pair, + signed_top_hashes: Vec, + encrypted_state_diff: Vec, +} + +impl Default for SidechainBlockDataBuilder { + fn default() -> Self { + SidechainBlockDataBuilder { + timestamp: Default::default(), + layer_one_head: Default::default(), + signer: Pair::from_seed(&ENCLAVE_SEED), + signed_top_hashes: Default::default(), + encrypted_state_diff: Default::default(), + } + } +} + +impl SidechainBlockDataBuilder { + pub fn random() -> Self { + SidechainBlockDataBuilder { + timestamp: itp_time_utils::now_as_u64(), + layer_one_head: BlockHash::random(), + signer: Pair::from_seed(&ENCLAVE_SEED), + signed_top_hashes: vec![H256::random(), H256::random()], + encrypted_state_diff: vec![1, 3, 42, 8, 11, 33], + } + } + + pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { + self.timestamp = timestamp; + self + } + + pub fn with_signer(mut self, signer: ed25519::Pair) -> Self { + self.signer = signer; + self + } + + pub fn with_layer_one_head(mut self, layer_one_head: H256) -> Self { + self.layer_one_head = layer_one_head; + self + } + + pub fn with_signed_top_hashes(mut self, signed_top_hashes: Vec) -> Self { + self.signed_top_hashes = signed_top_hashes; + self + } + + pub fn with_payload(mut self, payload: Vec) -> Self { + self.encrypted_state_diff = payload; + self + } + + pub fn build(self) -> BlockData { + BlockData { + timestamp: self.timestamp, + block_author: self.signer.public(), + layer_one_head: self.layer_one_head, + signed_top_hashes: self.signed_top_hashes, + encrypted_state_diff: self.encrypted_state_diff, + } + } +} diff --git a/sidechain/test/src/sidechain_header_builder.rs b/sidechain/test/src/sidechain_header_builder.rs new file mode 100644 index 0000000000..37b041e0d1 --- /dev/null +++ b/sidechain/test/src/sidechain_header_builder.rs @@ -0,0 +1,80 @@ +/* + Copyright 2021 Integritee AG and Supercomputing Systems AG + Copyright (C) 2017-2019 Baidu, Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +*/ + +//! Builder pattern for a sidechain header. 
+ +use its_primitives::types::{header::Header, ShardIdentifier}; +use sp_core::H256; + +pub struct SidechainHeaderBuilder { + parent_hash: H256, + block_number: u64, + shard_id: ShardIdentifier, + block_data_hash: H256, +} + +impl Default for SidechainHeaderBuilder { + fn default() -> Self { + SidechainHeaderBuilder { + parent_hash: Default::default(), + block_number: 1, + shard_id: Default::default(), + block_data_hash: Default::default(), + } + } +} + +impl SidechainHeaderBuilder { + pub fn random() -> Self { + SidechainHeaderBuilder { + parent_hash: H256::random(), + block_number: 42, + shard_id: ShardIdentifier::random(), + block_data_hash: H256::random(), + } + } + + pub fn with_parent_hash(mut self, parent_hash: H256) -> Self { + self.parent_hash = parent_hash; + self + } + + pub fn with_block_number(mut self, block_number: u64) -> Self { + self.block_number = block_number; + self + } + + pub fn with_shard(mut self, shard_id: ShardIdentifier) -> Self { + self.shard_id = shard_id; + self + } + + pub fn with_block_data_hash(mut self, block_data_hash: H256) -> Self { + self.block_data_hash = block_data_hash; + self + } + + pub fn build(self) -> Header { + Header { + parent_hash: self.parent_hash, + block_number: self.block_number, + shard_id: self.shard_id, + block_data_hash: self.block_data_hash, + } + } +} diff --git a/sidechain/top-pool-executor/Cargo.toml b/sidechain/top-pool-executor/Cargo.toml index c10dd132af..f067737b9e 100644 --- a/sidechain/top-pool-executor/Cargo.toml +++ b/sidechain/top-pool-executor/Cargo.toml @@ -14,10 +14,10 @@ sgx_tstd = { branch = "master", git = "https://github.com/apache/teaclave-sgx-sd ita-stf = { path = "../../app-libs/stf", default-features = false } itp-stf-executor = { path = "../../core-primitives/stf-executor", default-features = false } itp-time-utils = { path = "../../core-primitives/time-utils", default-features = false } +itp-top-pool-author = { path = "../../core-primitives/top-pool-author", default-features = false } itp-types = { path = "../../core-primitives/types", default-features = false } its-primitives = { path = "../primitives", default-features = false } its-state = { path = "../state", default-features = false } -its-top-pool-rpc-author = { path = "../top-pool-rpc-author", default-features = false } # integritee dependencies sgx-externalities = { default-features = false, git = "https://github.com/ajuna-network/sgx-runtime", branch = "master" } @@ -29,12 +29,12 @@ thiserror_sgx = { package = "thiserror", git = "https://github.com/mesalock-linu thiserror = { version = "1.0", optional = true } # no-std compatible libraries -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } # substrate -sp-core = { version = "5.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "master" } -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +sp-core = { version = "6.0.0", default-features = false, features = ["full_crypto"], git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19" } +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} [features] @@ -44,10 +44,11 @@ 
std = [ "ita-stf/std", "itp-stf-executor/std", "itp-time-utils/std", + "itp-top-pool-author/std", "itp-types/std", "its-primitives/std", "its-state/std", - "its-top-pool-rpc-author/std", + # crate.io "codec/std", "log/std", @@ -63,9 +64,9 @@ sgx = [ "ita-stf/sgx", "itp-stf-executor/sgx", "itp-time-utils/sgx", + "itp-top-pool-author/sgx", "itp-types/sgx", "its-state/sgx", - "its-top-pool-rpc-author/sgx", "thiserror_sgx", ] diff --git a/sidechain/top-pool-executor/src/call_operator.rs b/sidechain/top-pool-executor/src/call_operator.rs index 98563e611a..1bf628b78b 100644 --- a/sidechain/top-pool-executor/src/call_operator.rs +++ b/sidechain/top-pool-executor/src/call_operator.rs @@ -18,12 +18,13 @@ use crate::{error::Result, TopPoolOperationHandler}; use ita_stf::{TrustedCallSigned, TrustedOperation}; use itp_stf_executor::traits::{StateUpdateProposer, StfExecuteTimedGettersBatch}; -use itp_types::{ShardIdentifier, H256}; +use itp_top_pool_author::traits::{AuthorApi, OnBlockImported, SendState}; +use itp_types::H256; use its_primitives::traits::{ - Block as SidechainBlockTrait, ShardIdentifierFor, SignedBlock as SignedSidechainBlockTrait, + Block as SidechainBlockTrait, BlockData, Header as HeaderTrait, ShardIdentifierFor, + SignedBlock as SignedSidechainBlockTrait, }; use its_state::{SidechainState, SidechainSystemExt, StateHash}; -use its_top_pool_rpc_author::traits::{AuthorApi, OnBlockCreated, SendState}; use log::*; use sgx_externalities::SgxExternalitiesTrait; use sp_runtime::{traits::Block as ParentchainBlockTrait, MultiSignature}; @@ -52,8 +53,11 @@ pub trait TopPoolCallOperator< fn remove_calls_from_pool( &self, shard: &ShardIdentifierFor, - calls: Vec, + executed_calls: Vec, ) -> Vec; + + // Notify pool about block import for status updates + fn on_block_imported(&self, block: &SignedSidechainBlock::Block); } impl @@ -63,32 +67,36 @@ where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: - SidechainBlockTrait, + SignedSidechainBlock::Block: SidechainBlockTrait, + <::Block as SidechainBlockTrait>::HeaderType: + HeaderTrait, RpcAuthor: AuthorApi - + OnBlockCreated + + OnBlockImported + SendState, StfExecutor: StateUpdateProposer + StfExecuteTimedGettersBatch, ::Externalities: SgxExternalitiesTrait + SidechainState + SidechainSystemExt + StateHash, { - fn get_trusted_calls(&self, shard: &ShardIdentifier) -> Result> { - Ok(self.rpc_author.get_pending_tops_separated(*shard)?.0) + fn get_trusted_calls( + &self, + shard: &ShardIdentifierFor, + ) -> Result> { + Ok(self.top_pool_author.get_pending_tops_separated(*shard)?.0) } fn get_trusted_call_hash(&self, call: &TrustedCallSigned) -> H256 { let top: TrustedOperation = TrustedOperation::direct_call(call.clone()); - self.rpc_author.hash_of(&top) + self.top_pool_author.hash_of(&top) } fn remove_calls_from_pool( &self, - shard: &ShardIdentifier, + shard: &ShardIdentifierFor, executed_calls: Vec, ) -> Vec { let mut failed_to_remove = Vec::new(); for executed_call in executed_calls { - if let Err(e) = self.rpc_author.remove_top( + if let Err(e) = self.top_pool_author.remove_top( vec![executed_call.trusted_operation_or_hash.clone()], *shard, executed_call.is_success(), @@ -101,4 +109,9 @@ where } failed_to_remove } + + fn on_block_imported(&self, block: &SignedSidechainBlock::Block) { + self.top_pool_author + .on_block_imported(block.block_data().signed_top_hashes(), block.hash()); + } } diff --git a/sidechain/top-pool-executor/src/call_operator_mock.rs 
b/sidechain/top-pool-executor/src/call_operator_mock.rs index 8f29974b19..d03332dd95 100644 --- a/sidechain/top-pool-executor/src/call_operator_mock.rs +++ b/sidechain/top-pool-executor/src/call_operator_mock.rs @@ -20,11 +20,13 @@ use crate::{ call_operator::{ExecutedOperation, TopPoolCallOperator}, error::Result, + H256, }; +use codec::Encode; use core::marker::PhantomData; -use ita_stf::TrustedCallSigned; +use ita_stf::{TrustedCallSigned, TrustedOperation}; use its_primitives::traits::{ShardIdentifierFor, SignedBlock as SignedSidechainBlockTrait}; -use sp_runtime::traits::Block as ParentchainBlockTrait; +use sp_runtime::traits::{BlakeTwo256, Block as ParentchainBlockTrait, Hash}; use std::{collections::HashMap, sync::RwLock}; pub struct TopPoolCallOperatorMock @@ -88,6 +90,11 @@ where Ok(self.trusted_calls.get(shard).map(|v| v.clone()).unwrap_or_default()) } + fn get_trusted_call_hash(&self, call: &TrustedCallSigned) -> H256 { + let top: TrustedOperation = TrustedOperation::direct_call(call.clone()); + top.using_encoded(|x| BlakeTwo256::hash(x)) + } + fn remove_calls_from_pool( &self, shard: &ShardIdentifierFor, @@ -97,4 +104,9 @@ where remove_call_invoked_lock.push((*shard, calls)); Default::default() } + + fn on_block_imported(&self, _block: &SignedSidechainBlock::Block) { + // Do nothing for now + // FIXME: We should include unit tests to see if pool is notified about block import + } } diff --git a/sidechain/top-pool-executor/src/error.rs b/sidechain/top-pool-executor/src/error.rs index cb63ac70e2..a6dd1bca55 100644 --- a/sidechain/top-pool-executor/src/error.rs +++ b/sidechain/top-pool-executor/src/error.rs @@ -31,7 +31,7 @@ pub enum Error { #[error("STF execution error: {0}")] StfExecution(#[from] itp_stf_executor::error::Error), #[error("TOP pool RPC author error: {0}")] - TopPoolRpcAuthor(#[from] its_top_pool_rpc_author::error::Error), + TopPoolAuthor(#[from] itp_top_pool_author::error::Error), #[error(transparent)] Other(#[from] Box), } diff --git a/sidechain/top-pool-executor/src/getter_operator.rs b/sidechain/top-pool-executor/src/getter_operator.rs index 9d1719b580..a1525ac124 100644 --- a/sidechain/top-pool-executor/src/getter_operator.rs +++ b/sidechain/top-pool-executor/src/getter_operator.rs @@ -22,12 +22,12 @@ use crate::{ use codec::Encode; use ita_stf::{hash::TrustedOperationOrHash, TrustedGetterSigned}; use itp_stf_executor::traits::{StateUpdateProposer, StfExecuteTimedGettersBatch}; +use itp_top_pool_author::traits::{AuthorApi, OnBlockImported, SendState}; use itp_types::{ShardIdentifier, H256}; use its_primitives::traits::{ Block as SidechainBlockTrait, SignedBlock as SignedSidechainBlockTrait, }; use its_state::{SidechainState, SidechainSystemExt, StateHash}; -use its_top_pool_rpc_author::traits::{AuthorApi, OnBlockCreated, SendState}; use log::*; use sgx_externalities::SgxExternalitiesTrait; use sp_runtime::{traits::Block as ParentchainBlockTrait, MultiSignature}; @@ -61,10 +61,9 @@ where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: - SidechainBlockTrait, + SignedSidechainBlock::Block: SidechainBlockTrait, RpcAuthor: AuthorApi - + OnBlockCreated + + OnBlockImported + SendState, StfExecutor: StateUpdateProposer + StfExecuteTimedGettersBatch, ::Externalities: @@ -85,13 +84,13 @@ where |trusted_getter_signed: &TrustedGetterSigned, state_result: StfExecutorResult>>| { let hash_of_getter = - self.rpc_author.hash_of(&trusted_getter_signed.clone().into()); + 
self.top_pool_author.hash_of(&trusted_getter_signed.clone().into()); match state_result { Ok(r) => { // let client know of current state trace!("Updating client"); - match self.rpc_author.send_state(hash_of_getter, r.encode()) { + match self.top_pool_author.send_state(hash_of_getter, r.encode()) { Ok(_) => trace!("Successfully updated client"), Err(e) => error!("Could not send state to client {:?}", e), } @@ -114,7 +113,7 @@ where } fn get_trusted_getters(&self, shard: &ShardIdentifier) -> Result> { - Ok(self.rpc_author.get_pending_tops_separated(*shard)?.1) + Ok(self.top_pool_author.get_pending_tops_separated(*shard)?.1) } fn remove_getter_from_pool( @@ -122,6 +121,6 @@ where shard: &ShardIdentifier, getter: TrustedOperationOrHash, ) -> Result> { - Ok(self.rpc_author.remove_top(vec![getter], *shard, false)?) + Ok(self.top_pool_author.remove_top(vec![getter], *shard, false)?) } } diff --git a/sidechain/top-pool-executor/src/lib.rs b/sidechain/top-pool-executor/src/lib.rs index 03266822a0..46da3f35f3 100644 --- a/sidechain/top-pool-executor/src/lib.rs +++ b/sidechain/top-pool-executor/src/lib.rs @@ -49,12 +49,12 @@ pub use call_operator::TopPoolCallOperator; pub use getter_operator::TopPoolGetterOperator; use itp_stf_executor::traits::{StateUpdateProposer, StfExecuteTimedGettersBatch}; +use itp_top_pool_author::traits::{AuthorApi, OnBlockImported, SendState}; use itp_types::H256; use its_primitives::traits::{ Block as SidechainBlockTrait, SignedBlock as SignedSidechainBlockTrait, }; use its_state::{SidechainState, SidechainSystemExt, StateHash}; -use its_top_pool_rpc_author::traits::{AuthorApi, OnBlockCreated, SendState}; use sgx_externalities::SgxExternalitiesTrait; use sp_runtime::{traits::Block as ParentchainBlockTrait, MultiSignature}; use std::{marker::PhantomData, sync::Arc}; @@ -62,28 +62,32 @@ use std::{marker::PhantomData, sync::Arc}; /// Executes operations on the top pool /// /// Operations can either be Getters or Calls -pub struct TopPoolOperationHandler { - rpc_author: Arc, +pub struct TopPoolOperationHandler< + ParentchainBlock, + SignedSidechainBlock, + TopPoolAuthor, + StfExecutor, +> { + top_pool_author: Arc, stf_executor: Arc, _phantom: PhantomData<(ParentchainBlock, SignedSidechainBlock)>, } -impl - TopPoolOperationHandler +impl + TopPoolOperationHandler where ParentchainBlock: ParentchainBlockTrait, SignedSidechainBlock: SignedSidechainBlockTrait, - SignedSidechainBlock::Block: - SidechainBlockTrait, - RpcAuthor: AuthorApi - + OnBlockCreated + SignedSidechainBlock::Block: SidechainBlockTrait, + TopPoolAuthor: AuthorApi + + OnBlockImported + SendState, StfExecutor: StateUpdateProposer + StfExecuteTimedGettersBatch, ::Externalities: SgxExternalitiesTrait + SidechainState + SidechainSystemExt + StateHash, { - pub fn new(rpc_author: Arc, stf_executor: Arc) -> Self { - TopPoolOperationHandler { rpc_author, stf_executor, _phantom: Default::default() } + pub fn new(top_pool_author: Arc, stf_executor: Arc) -> Self { + TopPoolOperationHandler { top_pool_author, stf_executor, _phantom: Default::default() } } } diff --git a/sidechain/top-pool-rpc-author/src/initializer.rs b/sidechain/top-pool-rpc-author/src/initializer.rs deleted file mode 100644 index 3321aa76dd..0000000000 --- a/sidechain/top-pool-rpc-author/src/initializer.rs +++ /dev/null @@ -1,55 +0,0 @@ -/* - Copyright 2021 Integritee AG and Supercomputing Systems AG - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - -*/ - -use crate::{ - api::SidechainApi, - author::{Author, AuthorTopFilter}, - pool_types::{BPool, EnclaveRpcConnectionRegistry}, -}; -use itc_direct_rpc_server::rpc_responder::RpcResponder; -use itp_ocall_api::EnclaveMetricsOCallApi; -use itp_sgx_crypto::ShieldingCrypto; -use itp_stf_state_handler::query_shard_state::QueryShardState; -use its_top_pool::pool::Options as PoolOptions; -use std::sync::Arc; - -pub type SidechainRpcAuthor = - Author; - -/// Initialize the author components. -/// -/// Creates and initializes the global author container from which the -/// RPC author can be accessed. We do this in a centralized manner, to allow -/// easy feature-gating of the entire sidechain/top-pool feature. -pub fn create_top_pool_rpc_author( - connection_registry: Arc, - state_handler: Arc, - ocall_api: Arc, - shielding_crypto: ShieldingKey, -) -> Arc> -where - StateHandler: QueryShardState, - ShieldingKey: ShieldingCrypto, - OCallApi: EnclaveMetricsOCallApi + Send + Sync + 'static, -{ - let rpc_responder = Arc::new(RpcResponder::new(connection_registry)); - - let side_chain_api = Arc::new(SidechainApi::::new()); - let top_pool = Arc::new(BPool::create(PoolOptions::default(), side_chain_api, rpc_responder)); - - Arc::new(Author::new(top_pool, AuthorTopFilter {}, state_handler, shielding_crypto, ocall_api)) -} diff --git a/sidechain/validateer-fetch/Cargo.toml b/sidechain/validateer-fetch/Cargo.toml index 55cbfc4f18..4640b7d651 100644 --- a/sidechain/validateer-fetch/Cargo.toml +++ b/sidechain/validateer-fetch/Cargo.toml @@ -7,20 +7,19 @@ edition = "2018" [dependencies] derive_more = "0.99.16" thiserror = "1.0.26" -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive", "chain-error"] } +codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive", "chain-error"] } # substrate deps -frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-core = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-runtime = { version = "5.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} -sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "master"} +frame-support = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-core = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-runtime = { version = "6.0.0", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} +sp-std = { version = "4.0.0-dev", default-features = false, git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.19"} # local deps itp-teerex-storage = { path = "../../core-primitives/teerex-storage", default-features = false } itp-types = { 
path = "../../core-primitives/types", default-features = false } itp-storage = { path = "../../core-primitives/storage", default-features = false } itp-ocall-api = { path = "../../core-primitives/ocall-api", default-features = false } -itp-storage-verifier = { path = "../../core-primitives/storage-verified", default-features = false } [features] default = ["std"] @@ -32,7 +31,6 @@ std = [ "itp-types/std", "itp-storage/std", "itp-ocall-api/std", - "itp-storage-verifier/std", ] [dev-dependencies] diff --git a/sidechain/validateer-fetch/src/error.rs b/sidechain/validateer-fetch/src/error.rs index 4c0b4a615c..3b5d4a3f2c 100644 --- a/sidechain/validateer-fetch/src/error.rs +++ b/sidechain/validateer-fetch/src/error.rs @@ -22,6 +22,6 @@ pub type Result = core::result::Result; #[derive(Debug, Display, From)] pub enum Error { Codec(codec::Error), - Onchain(itp_storage_verifier::Error), + Onchain(itp_ocall_api::Error), Other(&'static str), } diff --git a/sidechain/validateer-fetch/src/validateer.rs b/sidechain/validateer-fetch/src/validateer.rs index 1a690b80a9..6e380c7928 100644 --- a/sidechain/validateer-fetch/src/validateer.rs +++ b/sidechain/validateer-fetch/src/validateer.rs @@ -17,7 +17,7 @@ use crate::error::{Error, Result}; use frame_support::ensure; -use itp_storage_verifier::GetStorageVerified; +use itp_ocall_api::EnclaveOnChainOCallApi; use itp_teerex_storage::{TeeRexStorage, TeerexStorageKeys}; use itp_types::Enclave; use sp_core::H256; @@ -33,7 +33,7 @@ pub trait ValidateerFetch { -> Result; } -impl ValidateerFetch for OnchainStorage { +impl ValidateerFetch for OnchainStorage { fn current_validateers>( &self, header: &Header,
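// ---------------------------------------------------------------------------
// Illustration only, not part of the patch above: a minimal sketch of how the
// `SidechainHeaderBuilder` test helper introduced earlier in this change could
// be used in a unit test. The module path `its_test::sidechain_header_builder`
// is an assumption; the actual crate and module names are not visible in this
// hunk, only the builder's fields and methods are taken from the patch.
// ---------------------------------------------------------------------------
//
// use its_primitives::types::{header::Header, ShardIdentifier};
// use its_test::sidechain_header_builder::SidechainHeaderBuilder; // assumed path
// use sp_core::H256;
//
// #[test]
// fn builder_overrides_only_the_requested_fields() {
//     let parent_hash = H256::random();
//
//     // Start from the defaults (block_number = 1) and override two fields.
//     let header: Header = SidechainHeaderBuilder::default()
//         .with_parent_hash(parent_hash)
//         .with_block_number(7)
//         .build();
//
//     assert_eq!(header.parent_hash, parent_hash);
//     assert_eq!(header.block_number, 7);
//     // Fields that were not overridden keep their default values.
//     assert_eq!(header.shard_id, ShardIdentifier::default());
//     assert_eq!(header.block_data_hash, H256::default());
// }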